# Core
library(tidyverse)
## ── Attaching packages ─────────────────────────────────────── tidyverse 1.3.1 ──
## ✓ ggplot2 3.3.5     ✓ purrr   0.3.4
## ✓ tibble  3.1.6     ✓ dplyr   1.0.7
## ✓ tidyr   1.1.4     ✓ stringr 1.4.0
## ✓ readr   2.1.1     ✓ forcats 0.5.1
## ── Conflicts ────────────────────────────────────────── tidyverse_conflicts() ──
## x dplyr::filter() masks stats::filter()
## x dplyr::lag()    masks stats::lag()
library(grid)
library(gridExtra)
## 
## Attaching package: 'gridExtra'
## The following object is masked from 'package:dplyr':
## 
##     combine
# Load data
library(readxl)

# Time Series
library(timetk)
## Registered S3 method overwritten by 'tune':
##   method                   from   
##   required_pkgs.model_spec parsnip
library(forecast)
## Registered S3 method overwritten by 'quantmod':
##   method            from
##   as.zoo.data.frame zoo
library(TSA)
## Registered S3 methods overwritten by 'TSA':
##   method       from    
##   fitted.Arima forecast
##   plot.Arima   forecast
## 
## Attaching package: 'TSA'
## The following object is masked from 'package:readr':
## 
##     spec
## The following objects are masked from 'package:stats':
## 
##     acf, arima
## The following object is masked from 'package:utils':
## 
##     tar
library(NTS)
library(MSwM)
## Loading required package: parallel
library(tsDyn)
library(fNonlinear)
## Loading required package: timeDate
## 
## Attaching package: 'timeDate'
## The following objects are masked from 'package:TSA':
## 
##     kurtosis, skewness
## Loading required package: timeSeries
## Loading required package: fBasics
library(dlm)
## 
## Attaching package: 'dlm'
## The following object is masked from 'package:ggplot2':
## 
##     %+%
library(astsa)
## 
## Attaching package: 'astsa'
## The following object is masked from 'package:fBasics':
## 
##     nyse
## The following object is masked from 'package:forecast':
## 
##     gas
library(seasonal)
## 
## Attaching package: 'seasonal'
## The following object is masked from 'package:astsa':
## 
##     unemp
## The following objects are masked from 'package:timeSeries':
## 
##     outlier, series
## The following object is masked from 'package:tibble':
## 
##     view
library(tseries)
library(astsa)
library(tsoutliers)
library(urca)

# Machine Learning
library(tidymodels)
## ── Attaching packages ────────────────────────────────────── tidymodels 0.1.4 ──
## ✓ broom        0.7.11     ✓ rsample      0.1.1 
## ✓ dials        0.0.10     ✓ tune         0.1.6 
## ✓ infer        1.0.0      ✓ workflows    0.2.4 
## ✓ modeldata    0.1.1      ✓ workflowsets 0.1.0 
## ✓ parsnip      0.1.7      ✓ yardstick    0.0.9 
## ✓ recipes      0.1.17
## Warning: package 'broom' was built under R version 4.1.2
## ── Conflicts ───────────────────────────────────────── tidymodels_conflicts() ──
## x dlm::%+%()            masks ggplot2::%+%()
## x yardstick::accuracy() masks forecast::accuracy()
## x gridExtra::combine()  masks dplyr::combine()
## x scales::discard()     masks purrr::discard()
## x timeSeries::filter()  masks dplyr::filter(), stats::filter()
## x recipes::fixed()      masks stringr::fixed()
## x timeSeries::lag()     masks dplyr::lag(), stats::lag()
## x yardstick::spec()     masks TSA::spec(), readr::spec()
## x recipes::step()       masks stats::step()
## x seasonal::view()      masks tibble::view()
## • Dig deeper into tidy modeling with R at https://www.tmwr.org
library(modeltime)
## 
## Attaching package: 'modeltime'
## The following object is masked from 'package:seasonal':
## 
##     trend
## The following object is masked from 'package:TSA':
## 
##     season
library(modeltime.ensemble)
## Loading required package: modeltime.resample
library(modeltime.resample)

library(timetk)
getPerformance = function(pred, val) {
    res = pred - val
    MAE = sum(abs(res))/length(val)
    RSS = sum(res^2)
    MSE = RSS/length(val)
    RMSE = sqrt(MSE)
    perf = data.frame(MAE, RSS, MSE, RMSE)
    return(perf)
}

#getPerformance(pred, val)

1.Introducción

2.Metodología

2.1. Datos

2.2. Modelo Lineal

2.3. Modelo No Lineal

2.4. Modelo Machine Learning

2.4.1. Validación Cruzada

2.5. Métricas de Rendimiento

3. Resultados

El presente apartado esta dividido en dos secciones las cuales muestran los resultados obtenidos que buscan respaldar el objetivo planteado. La primera sección se compara y selecciona el mejor modelo de pronostico de serie de tiempo según el tipo de modelo: lineales, no lineales y de minería de datos, para posteriormente, realizar un ensamble con los mejores tres métodos. La segunda sección presenta una prueba de tensión en el cual se plantearán diferentes escenarios para estimar el potencial impacto de una caída abrupta de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dólares en Costa Rica para diciembre 2021.

3.1. Análisis Exploratorio

En la figura @ref(fig:evolucionserie) se muestran los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dolares para febrero-2001 a julio-2021 y de la cual se genera el análisis para identificar las siguientes características: tendencias o ciclos, existencia de estabilidad en las observaciones, variancia de las observaciones (constante o variable en el tiempo), existencia de observaciones inusuales y de puntos extremos, cambios en la estructura de la serie, entre otras.

SeriesDatos <- read_excel("~/Google Drive/Mi unidad/1.Maestria II Ciclo 2021/Curso de Analisis De Casos/Caso II/Datos/Base Datos.xlsx")%>%
  janitor::clean_names()%>%
  mutate(ActivoNeto=paste0(activo_neto,"-01"))%>%
  rename('ActNetCRC'=crc,
         'ActNetUSD'=usd)

actnetcrc<- ts(SeriesDatos[,2],start =c(2001,2),end=c(2021,7), frequency = 12)
actnetusd<- ts(SeriesDatos[,3],start =c(2001,2),end=c(2021,7), frequency = 12)
actnet <- cbind(actnetcrc,actnetusd) 

fitcrc<-actnetcrc %>% 
  seas() 

fitusd<- actnetusd %>% 
  seas() 
pseries<-autoplot(actnet,facets=TRUE) +
  xlab("Mes") +
  ylab("Millones")+
  theme_bw()

ptendseriecr<-autoplot(actnetcrc, series="Data") +
  autolayer(trendcycle(fitcrc), series="Tendencia") +
  #autolayer(seasadj(fitcrc), series="Ajustada Estacionalmente") +
  xlab("Mes") + ylab("Millones") +
  scale_colour_manual(values=c("grey70","red","royalblue4"),
             breaks=c("Data","Ajustada Estacionalmente","Tendencia"))+
  theme_bw()+
  ggtitle("Colones")+
   geom_vline(xintercept = 2015 + (06 - 1) / 12,linetype = "dashed", colour ='gray' )+
   geom_vline(xintercept = 2016 + (11 - 1) / 12,linetype = "dashed", colour ='gray' )+
  scale_y_continuous(breaks = seq(0,1200000,200000))

ptendserieusd<-autoplot(actnetusd, series="Data") +
  autolayer(trendcycle(fitusd), series="Tendencia") +
  #autolayer(seasadj(fitusd), series="Ajustada Estacionalmente") +
  xlab("Mes") + ylab("Saldos") +
  ggtitle("Dolares") +
  scale_colour_manual(values=c("grey70","red","royalblue4"),
             breaks=c("Data","Ajustada Estacionalmente","Tendencia"))+
  theme_bw()+
   geom_vline(xintercept = 2015 + (06 - 1) / 12,linetype = "dashed", colour ='gray' )+
   geom_vline(xintercept = 2016 + (12 - 1) / 12,linetype = "dashed", colour ='gray' )+
  scale_y_continuous(breaks = seq(0,2000,250)) 

grid.arrange(ptendseriecr, ptendserieusd, ncol = 1)
Costa Rica:Evolución de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero
 en colones y dolares, febrero-2001 a julio-2021

Costa Rica:Evolución de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dolares, febrero-2001 a julio-2021

#https://www.sepg.pap.hacienda.gob.es/sitios/sepg/es-ES/Presupuestos/DocumentacionEstadisticas/Documentacion/Documents/DOCUMENTOS%20DE%20TRABAJO/D95006.pdf

otlier_crc<- tso(y = actnetcrc,types=c("SLS","AO","LS","TC","IO"))
## Warning in locate.outliers.iloop(resid = resid, pars = pars, cval = cval, :
## stopped when 'maxit.iloop' was reached

## Warning in locate.outliers.iloop(resid = resid, pars = pars, cval = cval, :
## stopped when 'maxit.iloop' was reached

## Warning in locate.outliers.iloop(resid = resid, pars = pars, cval = cval, :
## stopped when 'maxit.iloop' was reached
## Warning in locate.outliers.oloop(y = y, fit = fit, types = types, cval = cval, :
## stopped when 'maxit.oloop = 4' was reached
plot(otlier_crc)

# otlier_usd<- tso(y = actnetusd,types=c("SLS","AO","LS","TC","IO"))
# otlier_usd

A partir del análisis de la serie se identificaron las siguientes característica:

  • Para ambas series del activo neto , colones y dolares, se observa una tendencia creciente desde febrero 2001, así como un aumento de la variabilidad conforme aumenta los meses.

  • Para el periodo de mayo 2015 a octubre 2016 (lineas punteadas gris) hay un cambio de nivel (Valor extremo LS[^4]) en el volumen mensual del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero tanto en colones como en dolares, sin embargo, es inverso en ambas series, lo cual sugiere que posiblemente los participantes trasladaron sus inversiones de dolares a colones, esto se explica por:

    • La adopción del régimen de tipo de cambio de flotación administrada por parte del Banco Central de Costa Rica (BCCR) y el incremento en el superávit de divisas del sector privado incidió en la apreciación del colón (disminución del tipo de cambio) [@bccr5].

    • La reducción de la tasa de interés de política monetaria por parte del BCCR en 300 puntos base en el 2015, con el objetivo de estimular la economía, promoviendo el crecimiento en el crédito nacional y para reducir el costo de la deuda para el gobierno [@mv1; @mv2].

    • En el último trimestre del 2015, la industria tuvo una contracción de la liquidez en dolares, explicado por la salida de los participantes hacia el mercado internacional [@mv2].

  • Para el activo neto en colones se observa que en abril 2020 el activo neto en colones creció en 19.5 por ciento respecto al mismo periodo del año pasado, este comportamiento creciente y acelerado se mantuvo hasta diciembre de ese mismo año. Lo cual coincide con el efecto de la crisis sanitaria por COVID-19 que inicio en Costa Rica en marzo 2020, esta fecha es identificada como un valor extremo de cambio temporal [^4]. Esta situación sanitaria provocó un aumento de la incertidumbre en la economía mundial incidiendo en que los agentes económicos buscaran refugiarse en activos líquidos [@bccr1]. Un comportamiento similar ocurre para el activo neto en dolares.

  • Respecto a la estacionalidad de las series, se observa en la figura @ref(fig:estacionalidad) que para el caso de colones los saldos del activo neto tienden a ser mayores en enero y octubre, y presentar valores relativamente bajos al finalizar el año noviembre y diciembre, esto es de esperar debido a la época navideña y que diciembre comúnmente se labora 3 de las 4 semana del mes. Para el caso de dolares se observa que los meses con mayores saldos del activo neto se dan de mayo a agosto, y al igual que el caso de colones, se observa que los dos últimos meses del año los mismos se reduce.

pestacioncr <- fitcrc %>% 
  seasonal() %>% 
  ggsubseriesplot() + 
  ylab("Estacionalidad")+
  theme_bw()+
  ggtitle("Colones")

pestacionusd <- fitusd %>% 
  seasonal() %>% 
  ggsubseriesplot() + 
  ylab("Estacionalidad")+
  theme_bw()+
  ggtitle("Dolares")

grid.arrange(pestacioncr, pestacionusd, nrow = 2,ncol=1)
Costa Rica:Indice Estacional de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero
 en colones y dolares, febrero-2001 a julio-2021

Costa Rica:Indice Estacional de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dolares, febrero-2001 a julio-2021

Por otro lado, respecto al componente irregular para la serie en colones (ver en el @ref(anexos), la figura @ref(fig:descomposicionplotanexo)) ,se comporta de hasta el año 2012 de forma aditiva, en otras, palabras la variancia de la serie no fluctua con el nivel de la serie, sin embargo, a partir de 2012 hacia delante se observa que la variación aumenta con el nivel de la serie, por lo cual se podría argumentar que la serie tiene un comportamiento mixto (aditivo y multiplicativo). En contra parte, para la serie en dolares no se observa una variación similar en todo el periodo y que no varía con respecto al nivel de la serie.

fitcrc_add<-actnetcrc %>% 
  decompose(type = "additive")

fitcrc_multi<-actnetcrc %>% 
  decompose(type = "multiplicative")

fitusd_add<- actnetusd  %>% 
  decompose(type = "additive")

fitusd_multi<- actnetusd %>% 
  decompose(type = "multiplicative")

pdescompcrcadd <- fitcrc_add%>%
  autoplot() + 
  xlab("Mes")+
  ggtitle("Aditiva: Colones") +
  theme_bw()

pdescompcrcmult<-fitcrc_multi%>%
  autoplot() + xlab("Mes") +
  ggtitle("Multiplicativa: Colones")+
  theme_bw()

pdescompusdadd <- fitusd_add%>%
  autoplot() + 
  xlab("Mes")+
  ggtitle("Aditiva: Dolares") +
  theme_bw()

pdescompusdmult<-fitusd_multi%>%
  autoplot() + xlab("Mes") +
  ggtitle("Multiplicativa: Dolares")+
  theme_bw()

descompo<-grid.arrange(pdescompcrcadd,pdescompcrcmult, pdescompusdadd,pdescompusdmult, nrow = 2,ncol=2)

Para confirmar cual modelo (aditivo o multiplicativo) se ajusta mejor a cada serie se procedió a evaluar si el componente irregular identificando se ajusta a una distribución normal, para lo cual se realizaron la pruebas de hipótesis de normalidad Shapiro-Wilk y Jarque-Bera, así como una inspección gráfica por medio de Cuantil- Cuantil (qqplot). En la figura @ref(fig:irregularcrc) se puede identificar que para el caso de la serie en colones, el mejor modelo es el multiplicativo mientras que para la serie en dolares es el aditivo.

Aleatorio_Desc<-cbind(
  Aleatorio_crc_add=fitcrc_add$random,
  Aleatorio_crc_multi=fitcrc_multi$random,
  Aleatorio_usd_add=fitusd_add$random,
  Aleatorio_usd_multi=fitusd_multi$random)%>%
  as.data.frame()

jb_res_crc_add<-jarque.bera.test(Aleatorio_Desc$Aleatorio_crc_add[!is.na(Aleatorio_Desc$Aleatorio_crc_add)]) # Cumple
jb_res_crc_mult<-jarque.bera.test(Aleatorio_Desc$Aleatorio_crc_multi[!is.na(Aleatorio_Desc$Aleatorio_crc_multi)]) # Cumple
jb_res_usd_add<-jarque.bera.test(Aleatorio_Desc$Aleatorio_usd_add[!is.na(Aleatorio_Desc$Aleatorio_usd_add)]) # Cumple
jb_res_usd_multi<-jarque.bera.test(Aleatorio_Desc$Aleatorio_usd_multi[!is.na(Aleatorio_Desc$Aleatorio_usd_multi)]) # Cumple

sw_res_crc_add<-shapiro.test(Aleatorio_Desc$Aleatorio_crc_add[!is.na(Aleatorio_Desc$Aleatorio_crc_add)]) # Cumple
sw_res_crc_mult<-shapiro.test(Aleatorio_Desc$Aleatorio_crc_multi[!is.na(Aleatorio_Desc$Aleatorio_crc_multi)]) # Cumple
sw_res_usd_add<-shapiro.test(Aleatorio_Desc$Aleatorio_usd_add[!is.na(Aleatorio_Desc$Aleatorio_usd_add)]) # Cumple
sw_res_usd_multi<-shapiro.test(Aleatorio_Desc$Aleatorio_usd_multi[!is.na(Aleatorio_Desc$Aleatorio_usd_multi)]) # Cumple

## Gráficosde qqplot
p1<-ggplot(Aleatorio_Desc, aes(sample = Aleatorio_crc_add))+
  stat_qq() + 
  stat_qq_line()+
  ggtitle("Aditiva - Colones") + 
  labs(subtitle = paste("Prubas de Normalidad (Estadístico,P-Value):Shapiro-Wilk:",round(sw_res_crc_add$statistic,3),",",round(sw_res_crc_add$p.value,4), "y",
                        "Jarque-Bera",round(jb_res_crc_add$statistic,3),",",round(jb_res_crc_add$p.value,4)))+
  theme_bw()

p2<-ggplot(Aleatorio_Desc, aes(sample = Aleatorio_crc_multi))+
  stat_qq() + 
  stat_qq_line()+
  ggtitle("Multiplicativa - Colones")+ 
  labs(subtitle = paste("Prubas de Normalidad (Estadístico,P-Value):Shapiro Wilk:",round(sw_res_crc_mult$statistic,3),",",round(sw_res_crc_mult$p.value,4), "y",
                        "Jarque-Bera",round(jb_res_crc_mult$statistic,3),",",round(jb_res_crc_mult$p.value,4)))+
  theme_bw()

p3<-ggplot(Aleatorio_Desc, aes(sample = Aleatorio_usd_add))+
  stat_qq() + 
  stat_qq_line()+
  ggtitle("Aditiva - Dolares")+ 
  labs(subtitle = paste("Prubas de Normalidad (Estadístico,P-Value): Shapiro Wilk:",round(sw_res_usd_add$statistic,3),",",round(sw_res_usd_add$p.value,4), "y",
                        "Jarque-Bera",round(jb_res_usd_add$statistic,3),",",round(jb_res_usd_add$p.value,4)))+
  theme_bw()

p4<-ggplot(Aleatorio_Desc, aes(sample = Aleatorio_usd_multi))+
  stat_qq() + 
  stat_qq_line()+
  ggtitle("Multiplicativa - Dolares")+ 
  labs(subtitle = paste("Prubas de Normalidad (Estadístico,P-Value): Shapiro Wilk:",round(sw_res_usd_multi$statistic,3),",",round(sw_res_usd_multi$p.value,4), "y",
                        "Jarque-Bera",round(jb_res_usd_multi$statistic,3),",",round(jb_res_usd_multi$p.value,4)))+
  theme_bw()

grid.arrange(p1,p2,p3,p4,nrow=2, ncol = 2)
## Warning: Removed 12 rows containing non-finite values (stat_qq).
## Warning: Removed 12 rows containing non-finite values (stat_qq_line).
## Warning: Removed 12 rows containing non-finite values (stat_qq).
## Warning: Removed 12 rows containing non-finite values (stat_qq_line).
## Warning: Removed 12 rows containing non-finite values (stat_qq).
## Warning: Removed 12 rows containing non-finite values (stat_qq_line).
## Warning: Removed 12 rows containing non-finite values (stat_qq).
## Warning: Removed 12 rows containing non-finite values (stat_qq_line).
Costa Rica: QQPlot de los residuos de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero
 en colones y dolares por tipo de descomposición, febrero-2001 a julio-2021

Costa Rica: QQPlot de los residuos de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones y dolares por tipo de descomposición, febrero-2001 a julio-2021

MaxLag2<-length(actnetcrc)/4

## Media Constante
### Niveles
# H0: No es estacionario
# H1: Es estacionario
adf_org_crc<-adf.test(actnetcrc,alternative="stationary") # Media no constante
adf_org_usd<-adf.test(actnetusd,alternative="stationary") # Media no constante

## Realiza la prueba de raíz unitaria de Zivot \ & Andrews, que permite una ruptura en un punto desconocido en la intersección, la tendencia lineal o en ambas.

## Esta prueba se basa en la estimación recursiva de una regresión de prueba. El estadístico de prueba se define como el estadístico t mínimo del coeficiente de la variable endógena rezagada.

## Recuérdese que en las pruebas a evaluar la hipótesis nula es presencia de raíz unitaria, mientras que la alternativa es estacionariedad.

## La prueba es muy sensible, realice pruebas y siempre daba resultados o pvalues diferente para una distribucion normal 1 , 0

za_org_crc<-ur.za(window(actnetcrc,start=c(2001,2),end=c(2020,2)), model="both")
summary(za_org_crc)
## 
## ################################ 
## # Zivot-Andrews Unit Root Test # 
## ################################ 
## 
## 
## Call:
## lm(formula = testmat)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -112020  -19964    -694   16741  158850 
## 
## Coefficients:
##                 Estimate   Std. Error t value             Pr(>|t|)    
## (Intercept) -17396.37000   6617.37451  -2.629              0.00916 ** 
## y.l1             0.70943      0.04757  14.912 < 0.0000000000000002 ***
## trend         1094.74967    186.94371   5.856         0.0000000168 ***
## du           42188.76580  13230.73117   3.189              0.00163 ** 
## dt            -980.19378    340.74430  -2.877              0.00441 ** 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 37160 on 223 degrees of freedom
##   (1 observation deleted due to missingness)
## Multiple R-squared:  0.9805, Adjusted R-squared:  0.9801 
## F-statistic:  2802 on 4 and 223 DF,  p-value: < 0.00000000000000022
## 
## 
## Teststatistic: -6.1077 
## Critical values: 0.01= -5.57 0.05= -5.08 0.1= -4.82 
## 
## Potential break point at position: 171
plot(za_org_crc)

time(actnetcrc)[171]
## [1] 2015.25
za_org_usd<-ur.za(window(actnetusd,start=c(2001,2),end=c(2020,3)), model="both")
summary(za_org_usd)
## 
## ################################ 
## # Zivot-Andrews Unit Root Test # 
## ################################ 
## 
## 
## Call:
## lm(formula = testmat)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -171.049  -28.592   -2.375   22.788  219.846 
## 
## Coefficients:
##             Estimate Std. Error t value             Pr(>|t|)    
## (Intercept) -0.48428    8.84633  -0.055              0.95639    
## y.l1         0.81735    0.03686  22.173 < 0.0000000000000002 ***
## trend        1.02909    0.22921   4.490            0.0000114 ***
## du          45.46407   16.89895   2.690              0.00768 ** 
## dt          -1.03127    0.34119  -3.023              0.00280 ** 
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 53.66 on 224 degrees of freedom
##   (1 observation deleted due to missingness)
## Multiple R-squared:  0.9817, Adjusted R-squared:  0.9813 
## F-statistic:  3000 on 4 and 224 DF,  p-value: < 0.00000000000000022
## 
## 
## Teststatistic: -4.9551 
## Critical values: 0.01= -5.57 0.05= -5.08 0.1= -4.82 
## 
## Potential break point at position: 155
plot(za_org_usd)

time(actnetusd)[155]
## [1] 2013.917

En relación a la estacionariedad[^5] de las series, ambas no cumplen con dicha condición ya que presentan tendencia creciente y por ende no tienen media constante en el tiempo. Para confirmar esto realiza la prueba de hipótesis de Dickey-Fuller aumentada donde lo hipótesis nula es que la serie tiene raíz unitaria (proceso no estacionario), en ambos casos no se rechaza la hipótesis nula (Serie Colones: estadístico: -3.0082767 y valor-p: 0.1515055 y la Serie Dolares: estadístico: -2.7303393 y valor-p: 0.2684702), y se puede observar que la Función de Autocorrelación Simple Muestral (ACF) decae lentamente a 0 (Figuras @ref(fig:acfpacfseriescrc) y @ref(fig:acfpacfseriesusd)), esto sugiere que para realizar estacionaria las series se podrían transformar a logaritmo y diferenciar.

autocorrecrc<-acf2(actnetcrc,max.lag = MaxLag2)
Función de autocorrelación y autocorrelación parcial estimadas de la serie de cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones, febrero 2001 a diciembre-2020

Función de autocorrelación y autocorrelación parcial estimadas de la serie de cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en colones, febrero 2001 a diciembre-2020

#actnetcrc%>% ggtsdisplay(main="Colones")
autocorreusd<-acf2(actnetcrc,max.lag = MaxLag2)
Función de autocorrelación y autocorrelación parcial estimadas de la serie de cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en dolares, febrero 2001 a diciembre-2020

Función de autocorrelación y autocorrelación parcial estimadas de la serie de cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en dolares, febrero 2001 a diciembre-2020

# actnetusd%>% ggtsdisplay(main="Dolares")
crclineal<-fNonlinear::tnnTest(actnetusd, lag = 1, title = NULL, description = NULL)
usdlineal<-fNonlinear::tnnTest(actnetcrc, lag = 1, title = NULL, description = NULL)
# Kennan tests for nonlineary
# 
# La hipótesis nula de que la serie de tiempo sigue algún proceso de AR.
Keenan.test(log(actnetcrc))
## $test.stat
## [1] 6.554213
## 
## $p.value
## [1] 0.01107209
## 
## $order
## [1] 1
Keenan.test(log(actnetcrc), order=1)
## $test.stat
## [1] 6.554213
## 
## $p.value
## [1] 0.01107209
## 
## $order
## [1] 1
Keenan.test(log(actnetcrc), order=2)
## $test.stat
## [1] 7.604942
## 
## $p.value
## [1] 0.006268688
## 
## $order
## [1] 2
Keenan.test(log(actnetcrc), order=3)
## $test.stat
## [1] 12.3357
## 
## $p.value
## [1] 0.0005315724
## 
## $order
## [1] 3
Keenan.test(log(actnetusd))
## $test.stat
## [1] 6.742342
## 
## $p.value
## [1] 0.009991382
## 
## $order
## [1] 1
Keenan.test(log(actnetusd), order=1)
## $test.stat
## [1] 6.742342
## 
## $p.value
## [1] 0.009991382
## 
## $order
## [1] 1
Keenan.test(log(actnetusd), order=2)
## $test.stat
## [1] 6.162359
## 
## $p.value
## [1] 0.01373462
## 
## $order
## [1] 2
Keenan.test(log(actnetusd), order=3)
## $test.stat
## [1] 7.098561
## 
## $p.value
## [1] 0.008242026
## 
## $order
## [1] 3

Lo que respecta a la linealidad de las series, se observa que las mismas cumplen con la linealidad en la media lo que es confirmado con la prueba de hipótesis de Teraesvirta, de la cual se concluye que no hay suficiente evidencia estadística para rechazar la hipótesis nula que la serie cronológica es lineal en la media, tanto para colones como dolares (Colones: Estadístico 0.4947052 , Valor P 0.7808653 ; Estadístico 1.4958362 , Valor P 0.473351 )

En la figura @ref(fig:variabilidadseries) se observa para el caso de colones una variabilidad estable a lo largo del periodo de análisis, por otro lado, los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en dolares se observa una variabilidad importante antes del año 2005, sin embargo, posterior a ese año tiende a estabilizarse.

variabilidad_crc <- log(actnetcrc)/log(stats::lag(actnetcrc,1))
variabilidad_usd <- log(actnetusd)/log(stats::lag(actnetusd,1))

pvariabilidad_crc<-autoplot(variabilidad_crc)+ theme_bw()+ ggtitle('Colones')+
  scale_y_continuous(limits = c(0.75,1.1))
pvariabilidad_usd <- autoplot(variabilidad_usd)+ theme_bw()+ ggtitle('Dolares')+
  scale_y_continuous(limits = c(0.75,1.1))

grid.arrange(pvariabilidad_crc,pvariabilidad_usd,nrow=1,ncol=2)
Evolución de la variabilidad de la serie cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en dolares, febrero 2001 a diciembre-2020

Evolución de la variabilidad de la serie cronológica de los saldos del Activo Neto Administrado de los Fondos de Inversión del Mercado de Dinero en dolares, febrero 2001 a diciembre-2020

3.2. Modelos

3.2.1. Modelo Lineal

A partir del análisis exploratorio realizado de las series y considerando sus caracteristicas se procedió a estimar 5 modelos de pronóstico lineales por cada serie:

  • Modelo de Suavizamiento Exponencial Holt-Winter Aditivo

  • Modelo de Suavizamiento Exponencial Holt-Winter Multiplicativo

  • 3 Modelos univariantes autorregresivos integrados de media movil (ARIMA)

## Peridos de Tiempo
inicio_train<- c(2011,1)
fin_train<- c(2021,2)
inicio_test <- c(2021,3)

sactnetcrc<- window(actnetcrc,start=inicio_train)
sactnetcrc_train<- window(actnetcrc,start=inicio_train, end=fin_train)
sactnetcrc_test<- window(actnetcrc,start=inicio_test)

sactnetusd<- window(actnetusd,start=inicio_train)
sactnetusd_train<- window(actnetusd,start=inicio_train, end=fin_train)
sactnetusd_test<- window(actnetusd,start=inicio_test)

h.param <- length(sactnetcrc_test)

Serie en Colones

Holt Winter

MODELOS

Holt Winter Multiplicativo

ht2_multi <- hw(sactnetcrc_train, seasonal = "multiplicative", h = h.param)

summary(ht2_multi)
## 
## Forecast method: Holt-Winters' multiplicative method
## 
## Model Information:
## Holt-Winters' multiplicative method 
## 
## Call:
##  hw(y = sactnetcrc_train, h = h.param, seasonal = "multiplicative") 
## 
##   Smoothing parameters:
##     alpha = 0.8493 
##     beta  = 0.0001 
##     gamma = 0.0006 
## 
##   Initial states:
##     l = 396675.0123 
##     b = 5764.5991 
##     s = 0.8942 0.931 1.0095 1.0288 0.9945 1.0234
##            1.0383 1.0152 1.0124 0.988 1.0262 1.0384
## 
##   sigma:  0.0721
## 
##      AIC     AICc      BIC 
## 3215.470 3221.355 3263.139 
## 
## Error measures:
##                     ME     RMSE      MAE       MPE     MAPE     MASE       ACF1
## Training set -1006.657 45159.96 35508.68 -0.540406 5.533052 0.377747 0.01506986
## 
## Forecasts:
##          Point Forecast    Lo 80   Hi 80    Lo 95   Hi 95
## Mar 2021       967725.8 878338.4 1057113 831019.5 1104432
## Apr 2021       997327.2 876620.2 1118034 812721.8 1181932
## May 2021      1005920.7 861127.4 1150714 784478.4 1227363
## Jun 2021      1034847.4 865609.1 1204086 776019.8 1293675
## Jul 2021      1026135.4 840499.1 1211772 742229.2 1310042
pred_ht2_multi <- ht2_multi$mean

checkresiduals(ht2_multi)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt-Winters' multiplicative method
## Q* = 21.747, df = 8, p-value = 0.005407
## 
## Model df: 16.   Total lags used: 24

Holt Winter Aditivo

ht2_add <- hw(sactnetcrc_train, seasonal = "additive", h = h.param)

summary(ht2_add)

pred_ht2_add <- ht2_add$mean

checkresiduals(ht2_add)

METRICAS HOLT WINTER

perf_ht2_add_train<-getPerformance(ht2_add$model$fitted,sactnetcrc_train)
perf_ht2_multi_train<-getPerformance(ht2_multi$model$fitted,sactnetcrc_train)

perf_ht2_add<-getPerformance(pred_ht2_add, sactnetcrc_test)
perf_ht2_multi<-getPerformance(pred_ht2_multi, sactnetcrc_test)

data.frame(
  Modelo = c("Holt Winter Aditivia",
             "Holt Winter Multiplicativa"),
  AIC = c(ht2_add$model$aic,
          ht2_multi$model$aic),
  AICc= c(ht2_add$model$aicc,
          ht2_multi$model$aicc),
  BIC = c(ht2_add$model$bic,
          ht2_multi$model$bic))%>%
  arrange(AIC)%>%
  knitr::kable(caption="Metricas de Bondad de Ajuste")

MetResHW<-cbind(
  Modelo = c("Holt Winter Aditivia",
             "Holt Winter Multiplicativa",
             "Holt Winter Aditivia",
             "Holt Winter Multiplicativa"),
  Dataset=c("Entrenamiento","Entrenamiento","Prueba","Prueba"),
  rbind(
    perf_ht2_add_train,
    perf_ht2_multi_train,
    perf_ht2_add, 
    perf_ht2_multi
        )
) %>%
  arrange(RMSE)

MetResHW%>%
  knitr::kable(caption="Metricas de Ajuste sobre la tabla de validación")
ARIMA
## 
## Call:
## seas(x = sactnetcrc_train, transform.function = "log", regression.aictest = NULL, 
##     outlier = NULL, regression.variables = "ao2020.Mar", arima.model = "(0 1 0)(1 0 1)")
## 
## Coefficients:
##                Estimate Std. Error z value             Pr(>|z|)    
## AO2020.Mar     -0.26213    0.04651  -5.636   0.0000000173759851 ***
## AR-Seasonal-12  0.93237    0.03143  29.664 < 0.0000000000000002 ***
## MA-Seasonal-12  0.62705    0.08332   7.526   0.0000000000000523 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## SEATS adj.  ARIMA: (0 1 0)(1 0 1)  Obs.: 122  Transform: log
## AICc:  2930, BIC:  2941  QS (no seasonality in final):    0  
## Box-Ljung (no autocorr.): 25.52   Shapiro (normality): 0.9725 **
## 
## Call:
## seas(x = sactnetcrc_train, transform.function = "none", regression.aictest = NULL, 
##     outlier = NULL, regression.variables = "ao2020.Mar", arima.model = "(0 1 0)(0 1 1)")
## 
## Coefficients:
##                     Estimate    Std. Error z value             Pr(>|z|)    
## AO2020.Mar     -200519.71119   29678.44417  -6.756      0.0000000000141 ***
## MA-Seasonal-12       0.78410       0.07922   9.898 < 0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## SEATS adj.  ARIMA: (0 1 0)(0 1 1)  Obs.: 122  Transform: none
## AICc:  2647, BIC:  2655  QS (no seasonality in final):    0  
## Box-Ljung (no autocorr.): 27.41   Shapiro (normality): 0.9743 *
## 
## Call:
## seas(x = sactnetcrc_train, transform.function = "log", regression.aictest = NULL, 
##     outlier = NULL, regression.variables = c("ls2015.May", "ao2020.Mar"), 
##     arima.model = "(2 1 0)(1 0 0)")
## 
## Coefficients:
##                   Estimate Std. Error z value         Pr(>|z|)    
## LS2015.May         0.21302    0.05516   3.861         0.000113 ***
## AO2020.Mar        -0.24242    0.04827  -5.023 0.00000051001064 ***
## AR-Nonseasonal-01 -0.13474    0.08772  -1.536         0.124556    
## AR-Nonseasonal-02 -0.23542    0.08922  -2.639         0.008325 ** 
## AR-Seasonal-12     0.54195    0.07683   7.054 0.00000000000173 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## SEATS adj.  ARIMA: (2 1 0)(1 0 0)  Obs.: 122  Transform: log
## AICc:  2926, BIC:  2942  QS (no seasonality in final):    0  
## Box-Ljung (no autocorr.): 19.38   Shapiro (normality): 0.9827

Análisis de Supuestos

resseas1 <- resid(modelseas1)
resseas2 <- resid(modelseas2)
resseas3 <- resid(modelseas3)
#### Estacionariedad de los residuos
## Media Constante

adf_res_CRC_1<- adf.test(resseas1 , alternative='stationary')
## Warning in adf.test(resseas1, alternative = "stationary"): p-value smaller than
## printed p-value
adf_res_CRC_2<- adf.test(resseas2 , alternative='stationary')
## Warning in adf.test(resseas2, alternative = "stationary"): p-value smaller than
## printed p-value
adf_res_CRC_3<- adf.test(resseas3 , alternative='stationary')
## Warning in adf.test(resseas3, alternative = "stationary"): p-value smaller than
## printed p-value
adf_res_CRC_1
## 
##  Augmented Dickey-Fuller Test
## 
## data:  resseas1
## Dickey-Fuller = -4.8317, Lag order = 5, p-value = 0.01
## alternative hypothesis: stationary
adf_res_CRC_2
## 
##  Augmented Dickey-Fuller Test
## 
## data:  resseas2
## Dickey-Fuller = -5.1753, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary
adf_res_CRC_3
## 
##  Augmented Dickey-Fuller Test
## 
## data:  resseas3
## Dickey-Fuller = -4.4897, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary
#### Autocorrelaciòn de los resiudos
#####################################
#Autocorrelacion de los residuales y pruebas gráficas
## Ljung-Box test

# H0: Independencia de los residuos
# H1: No Independencia de los residuos

lb_res_CRC_1 <- checkresiduals(modelseas1 , lag=MaxLag2)
lb_res_CRC_2 <- checkresiduals(modelseas2 , lag=MaxLag2)
lb_res_CRC_3 <- checkresiduals(modelseas3 , lag=MaxLag2)
#### Varianza Constante de los residuos

## Varianza Constante ARCH Engle's Test for Residual Heteroscedasticity
# H0: los residuos son homocedasticos
# H1: los residuos no son homocedasticos

FinTS::ArchTest(resseas1,lag=12)
## 
##  ARCH LM-test; Null hypothesis: no ARCH effects
## 
## data:  resseas1
## Chi-squared = 9.7695, df = 12, p-value = 0.6362
FinTS::ArchTest(resseas2,lag=12)
## 
##  ARCH LM-test; Null hypothesis: no ARCH effects
## 
## data:  resseas2
## Chi-squared = 12.286, df = 12, p-value = 0.423
FinTS::ArchTest(resseas3,lag=12)
## 
##  ARCH LM-test; Null hypothesis: no ARCH effects
## 
## data:  resseas3
## Chi-squared = 10.948, df = 12, p-value = 0.5334
autoplot(resseas1^2 )+ theme_bw() ; acf2(resseas1^2, max.lag=MaxLag2)

##      [,1] [,2]  [,3]  [,4]  [,5]  [,6]  [,7]  [,8]  [,9] [,10] [,11] [,12]
## ACF  0.11 0.12 -0.02 -0.10 -0.08 -0.16 -0.01 -0.07 -0.11 -0.11 -0.08  0.03
## PACF 0.11 0.11 -0.04 -0.11 -0.05 -0.13  0.03 -0.05 -0.12 -0.12 -0.06  0.03
##      [,13] [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24]
## ACF   0.08  0.01 -0.03 -0.01  0.00 -0.06  0.00  0.02 -0.07 -0.03 -0.12  0.11
## PACF  0.07 -0.06 -0.11 -0.03  0.01 -0.07 -0.02 -0.03 -0.11 -0.03 -0.11  0.11
##      [,25] [,26] [,27] [,28] [,29] [,30] [,31] [,32] [,33] [,34] [,35] [,36]
## ACF   0.03  0.04  0.13  0.01  0.01 -0.04 -0.06  0.01  0.00  0.03 -0.08  0.02
## PACF -0.01 -0.05  0.08 -0.02 -0.05 -0.02 -0.08  0.00  0.03  0.03 -0.09  0.03
##      [,37] [,38] [,39] [,40] [,41] [,42] [,43] [,44] [,45] [,46] [,47] [,48]
## ACF  -0.02  0.01  0.05  0.03 -0.04 -0.11 -0.08 -0.09 -0.03 -0.05 -0.01 -0.06
## PACF -0.04  0.02  0.05 -0.01 -0.12 -0.08 -0.05 -0.08 -0.01 -0.12 -0.06 -0.10
##      [,49] [,50] [,51] [,52] [,53] [,54] [,55] [,56] [,57] [,58] [,59] [,60]
## ACF   0.00  0.02  0.05  0.26  0.18  0.15  0.06 -0.01 -0.02 -0.10  0.00  0.02
## PACF -0.04 -0.02 -0.06  0.16  0.08  0.05  0.05 -0.02  0.02 -0.04  0.08  0.07
##      [,61]
## ACF  -0.08
## PACF -0.05
autoplot(resseas2^2 )+ theme_bw() ; acf2(resseas2^2, max.lag=MaxLag2)

##      [,1] [,2] [,3] [,4] [,5]  [,6] [,7]  [,8] [,9] [,10] [,11] [,12] [,13]
## ACF  0.01 0.14  0.1 0.09 0.05 -0.02 0.22 -0.08 0.10 -0.03 -0.01  0.01  0.13
## PACF 0.01 0.14  0.1 0.07 0.02 -0.06 0.21 -0.08 0.05 -0.05 -0.04  0.01  0.17
##      [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25]
## ACF  -0.03 -0.05  0.08  0.06  0.00  0.00  0.04 -0.06  0.02 -0.11  0.14 -0.03
## PACF -0.09 -0.05  0.03  0.10 -0.01 -0.01 -0.05 -0.03  0.02 -0.11  0.16 -0.02
##      [,26] [,27] [,28] [,29] [,30] [,31] [,32] [,33] [,34] [,35] [,36] [,37]
## ACF   0.15  0.15 -0.02  0.14 -0.04  0.01 -0.07  0.12  0.00 -0.07  0.04 -0.03
## PACF  0.14  0.19 -0.04  0.03 -0.04 -0.13 -0.02  0.05 -0.02 -0.04  0.02 -0.03
##      [,38] [,39] [,40] [,41] [,42] [,43] [,44] [,45] [,46] [,47] [,48] [,49]
## ACF   0.02 -0.06  0.11 -0.02 -0.02 -0.06 -0.07 -0.01 -0.09 -0.04 -0.06 -0.03
## PACF  0.07 -0.04  0.05  0.02 -0.07 -0.13 -0.05 -0.02 -0.09  0.03  0.01  0.03
##      [,50] [,51] [,52] [,53] [,54] [,55] [,56] [,57] [,58] [,59] [,60] [,61]
## ACF   0.02 -0.01  0.01  0.11  0.14  0.02  0.06  0.02 -0.07  0.03  0.01 -0.04
## PACF  0.11 -0.03  0.03  0.05  0.12  0.00  0.04 -0.10 -0.14  0.08 -0.02 -0.03
autoplot(resseas3^2 )+ theme_bw() ; acf2(resseas3^2, max.lag=MaxLag2)

##      [,1] [,2] [,3]  [,4]  [,5]  [,6]  [,7]  [,8]  [,9] [,10] [,11] [,12] [,13]
## ACF  0.02 0.11 0.03 -0.02 -0.13 -0.14 -0.02 -0.09 -0.03  0.06  0.07  0.21  0.05
## PACF 0.02 0.11 0.02 -0.03 -0.14 -0.14  0.02 -0.05 -0.03  0.06  0.05  0.20  0.02
##      [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25]
## ACF   0.10  0.00 -0.04  0.00 -0.08  0.12 -0.08  0.06  0.01  0.01  0.11 -0.05
## PACF  0.03 -0.02 -0.03  0.06 -0.01  0.16 -0.04  0.04  0.00 -0.03  0.09 -0.06
##      [,26] [,27] [,28] [,29] [,30] [,31] [,32] [,33] [,34] [,35] [,36] [,37]
## ACF   0.00 -0.08 -0.08  0.14 -0.12 -0.02 -0.10  0.04 -0.10 -0.09  0.10 -0.06
## PACF -0.05 -0.05 -0.07  0.20 -0.11 -0.13 -0.12 -0.01 -0.05 -0.12  0.03 -0.04
##      [,38] [,39] [,40] [,41] [,42] [,43] [,44] [,45] [,46] [,47] [,48] [,49]
## ACF   0.01 -0.06  0.10  0.10 -0.05  0.10 -0.06 -0.03 -0.09 -0.08 -0.09 -0.09
## PACF -0.02 -0.04  0.06  0.11 -0.04  0.05  0.03  0.02  0.03 -0.03 -0.15  0.04
##      [,50] [,51] [,52] [,53] [,54] [,55] [,56] [,57] [,58] [,59] [,60] [,61]
## ACF  -0.05 -0.10  0.04  0.05 -0.04  0.20 -0.05  0.01 -0.04 -0.04 -0.05 -0.07
## PACF -0.07 -0.05 -0.01 -0.05 -0.06  0.15 -0.04 -0.07 -0.04 -0.03  0.01  0.05
#### Normalidad de los residuos

#####################################
#Normalidad de los residuales

# H0: La muestra proviene de una distribución normal.
# H1: La muestra no proviene de una distribución normal.

## Jarque Bera

jb_res_CRC_1<-jarque.bera.test(resseas1) # Cumple
jb_res_CRC_2<-jarque.bera.test(resseas2) # Cumple
jb_res_CRC_3<-jarque.bera.test(resseas3) # Cumple


jb_res_CRC_1
## 
##  Jarque Bera Test
## 
## data:  resseas1
## X-squared = 9.9942, df = 2, p-value = 0.006758
jb_res_CRC_2
## 
##  Jarque Bera Test
## 
## data:  resseas2
## X-squared = 4.636, df = 2, p-value = 0.09847
jb_res_CRC_3
## 
##  Jarque Bera Test
## 
## data:  resseas3
## X-squared = 1.8213, df = 2, p-value = 0.4023
sw_res_CRC_1<-shapiro.test(resseas1) # Cumple
sw_res_CRC_2<-shapiro.test(resseas2) # Cumple
sw_res_CRC_3<-shapiro.test(resseas3) # Cumple

sw_res_CRC_1
## 
##  Shapiro-Wilk normality test
## 
## data:  resseas1
## W = 0.97246, p-value = 0.008379
sw_res_CRC_2
## 
##  Shapiro-Wilk normality test
## 
## data:  resseas2
## W = 0.97432, p-value = 0.02051
sw_res_CRC_3
## 
##  Shapiro-Wilk normality test
## 
## data:  resseas3
## W = 0.98269, p-value = 0.1232
# car::qqPlot(resseas1)
# car::qqPlot(resseas2)
# car::qqPlot(resseas3)
#### PRONOSTICO SOBRE LOS DATOS DE VALIDACION
pronostico_CRC_1 <-
  window(
    series(modelseas1, "forecast.forecasts"),
    start = inicio_test,
    end = c(2021, 7)
  )
## specs have been added to the model: forecast
pronostico_CRC_2 <-
  window(
    series(modelseas2, "forecast.forecasts"),
    start = inicio_test,
    end = c(2021, 7)
  )
## specs have been added to the model: forecast
pronostico_CRC_3 <-
  window(
    series(modelseas3, "forecast.forecasts"),
    start = inicio_test,
    end = c(2021, 7)
  )
## specs have been added to the model: forecast
## PRONOSTICO SOBRE ENTRENAMIENTO
pronostico_CRC_1_train  <- final(modelseas1)
pronostico_CRC_2_train  <- final(modelseas2)
pronostico_CRC_3_train  <- final(modelseas3)
### METRICAS DE RENDIMIENTO ARIMA

## ENTRENAMIENTO

perfor_crc_train_mod_1 <-
  getPerformance(pronostico_CRC_1_train, sactnetcrc_train)
perfor_crc_train_mod_2 <-
  getPerformance(pronostico_CRC_2_train, sactnetcrc_train)
perfor_crc_train_mod_3 <-
  getPerformance(pronostico_CRC_3_train, sactnetcrc_train)

### VALIDACION
### 
perfor_crc_test_mod_1 <-
  getPerformance(pronostico_CRC_1[, 1], sactnetcrc_test)
perfor_crc_test_mod_2 <-
  getPerformance(pronostico_CRC_2[, 1], sactnetcrc_test)
perfor_crc_test_mod_3 <-
  getPerformance(pronostico_CRC_3[, 1], sactnetcrc_test)


### TABLA DE METRICAS

## ENTRENAMIENTO (AIC,BIC)

data.frame(
  Models=c(
      "1. ARIMA (0 1 0)(1 0 1) Log",
      "2. ARIMA (0 1 0)(0 1 1) Niveles",
      "3. ARIMA (2 1 0)(1 0 0) Log"
    ),
AIC=c(AIC(modelseas1),AIC(modelseas2),AIC(modelseas3)),
BIC=c(BIC(modelseas1),BIC(modelseas2),BIC(modelseas3))
)%>%
  arrange(AIC)%>%
  knitr::kable(caption="Medidas de Ajuste: ARIMA Entrenamiento")
Medidas de Ajuste: ARIMA Entrenamiento
Models AIC BIC
2. ARIMA (0 1 0)(0 1 1) Niveles 2646.710 2654.784
3. ARIMA (2 1 0)(1 0 0) Log 2925.722 2942.497
1. ARIMA (0 1 0)(1 0 1) Log 2929.554 2940.737
## VALIDACION Y ENTRENAMIENTO

Metricas_Sarima_CRC <- data.frame(
  Modelo = rep(
    c(
      "1. ARIMA (0 1 0)(1 0 1) Log",
      "2. ARIMA (0 1 0)(0 1 1) Niveles",
      "3. ARIMA (2 1 0)(1 0 0) Log"
    ),
    2
  ),
  Dataset = c(rep("Entrenamiento", 3), rep("Prueba", 3)),
  rbind(
    perfor_crc_train_mod_1,
    perfor_crc_train_mod_2,
    perfor_crc_train_mod_3,
    perfor_crc_test_mod_1,
    perfor_crc_test_mod_2,
    perfor_crc_test_mod_3
  )
)

#Metricas_Mod_Lin <- bind_rows(Metricas_HW, Metricas_Sarima_CRC)
Metricas_Mod_Lin<- Metricas_Sarima_CRC

knitr::kable(Metricas_Mod_Lin)
Modelo Dataset MAE RSS MSE RMSE
1. ARIMA (0 1 0)(1 0 1) Log Entrenamiento 21165.00 99913636070 818964230 28617.55
2. ARIMA (0 1 0)(0 1 1) Niveles Entrenamiento 21829.20 103361143650 847222489 29107.09
3. ARIMA (2 1 0)(1 0 0) Log Entrenamiento 28923.01 165071131447 1353042061 36783.72
1. ARIMA (0 1 0)(1 0 1) Log Prueba 47783.00 17050515820 3410103164 58396.09
2. ARIMA (0 1 0)(0 1 1) Niveles Prueba 47704.12 17563051551 3512610310 59267.28
3. ARIMA (2 1 0)(1 0 0) Log Prueba 42671.15 12081978350 2416395670 49156.85
# ggplot(Metricas_Mod_Lin) +
#   aes(x = Modelo, fill = Dataset, weight = RMSE) +
#   geom_bar() +
#   scale_fill_manual(values = c(Entrenamiento = "#E69999",
#                                Prueba = "#5C7FA7")) +
#   labs(x = "Método", y = "RMSE") +
#   coord_flip() +
#   theme_minimal() +
#   theme(legend.position = "none") +
#   facet_wrap(vars(Dataset), scales = "free", ncol = 1L)

Métricas de Rendimiento Modelos Lineales

rbind(Metricas_Sarima_CRC,
MetResHW)%>%
  arrange(Dataset,MAE)%>%
  knitr::kable(caption="Metricas de Rendimiento sobre la muestra de prueba")
Metricas de Rendimiento sobre la muestra de prueba
Modelo Dataset MAE RSS MSE RMSE
1. ARIMA (0 1 0)(1 0 1) Log Entrenamiento 21165.00 99913636070 818964230 28617.55
2. ARIMA (0 1 0)(0 1 1) Niveles Entrenamiento 21829.20 103361143650 847222489 29107.09
3. ARIMA (2 1 0)(1 0 0) Log Entrenamiento 28923.01 165071131447 1353042061 36783.72
Holt Winter Aditivia Entrenamiento 32997.01 230557903869 1889818884 43472.05
Holt Winter Multiplicativa Entrenamiento 35508.68 248809460176 2039421805 45159.96
3. ARIMA (2 1 0)(1 0 0) Log Prueba 42671.15 12081978350 2416395670 49156.85
2. ARIMA (0 1 0)(0 1 1) Niveles Prueba 47704.12 17563051551 3512610310 59267.28
1. ARIMA (0 1 0)(1 0 1) Log Prueba 47783.00 17050515820 3410103164 58396.09
Holt Winter Aditivia Prueba 47797.04 16671689420 3334337884 57743.73
Holt Winter Multiplicativa Prueba 53117.50 18279523374 3655904675 60464.08
### Holt-Winter Aditivo
# ht2_add_all <- hw(sactnetcrc,seasonal="additive",h = 5)
# autoplot(ht2_add_all)+
#   theme_bw()

### Arima (2,1,0) (1,0,1) Log
modelseas2_all <- seas(
  x = sactnetcrc,
  transform.function = "none",
  regression.aictest = NULL,
  outlier = NULL,
  regression.variables = "ao2020.Mar",
  arima.model = "(0 1 0)(0 1 1)"
)

pronostico_lin <- window(
    series(modelseas2_all, "forecast.forecasts"),
    start = c(2021, 8),
    end = c(2021, 12)
  )
## specs have been added to the model: forecast
autoplot(sactnetcrc) +
  autolayer(pronostico_lin) +
  theme_bw()
## For a multivariate time series, specify a seriesname for each time series. Defaulting to column names.

Serie en Dolares

Hold-Winter

Holt Winter Multiplicativo

ht2_multi_usd <- hw(sactnetusd_train,seasonal="multiplicative",h = h.param)

summary(ht2_multi_usd)
## 
## Forecast method: Holt-Winters' multiplicative method
## 
## Model Information:
## Holt-Winters' multiplicative method 
## 
## Call:
##  hw(y = sactnetusd_train, h = h.param, seasonal = "multiplicative") 
## 
##   Smoothing parameters:
##     alpha = 0.7168 
##     beta  = 0.0001 
##     gamma = 0.0006 
## 
##   Initial states:
##     l = 654.1817 
##     b = 10.5878 
##     s = 0.9495 0.9737 0.9555 0.9715 1.0324 1.032
##            1.0286 1.0576 0.9948 1.0028 1.0128 0.9889
## 
##   sigma:  0.0772
## 
##      AIC     AICc      BIC 
## 1658.563 1664.447 1706.231 
## 
## Error measures:
##                     ME     RMSE      MAE        MPE     MAPE      MASE     ACF1
## Training set -3.323565 70.01249 54.89003 -0.8501555 5.724199 0.3219086 0.172628
## 
## Forecasts:
##          Point Forecast    Lo 80    Hi 80    Lo 95    Hi 95
## Mar 2021       1656.303 1492.375 1820.230 1405.598 1907.008
## Apr 2021       1653.377 1452.261 1854.492 1345.797 1960.956
## May 2021       1768.911 1520.310 2017.512 1388.709 2149.113
## Jun 2021       1731.074 1459.224 2002.923 1315.316 2146.832
## Jul 2021       1747.492 1447.202 2047.783 1288.237 2206.747
pred_ht2_multi_usd <- ht2_multi_usd$mean

checkresiduals(ht2_multi_usd)

## 
##  Ljung-Box test
## 
## data:  Residuals from Holt-Winters' multiplicative method
## Q* = 22.92, df = 8, p-value = 0.003468
## 
## Model df: 16.   Total lags used: 24

Holt Winter Aditivo

ht2_add_usd <- hw(sactnetusd_train,seasonal="additive",h = h.param)

summary(ht2_add_usd)

pred_ht2_add_usd <- ht2_add_usd$mean

checkresiduals(ht2_add_usd)

METRICAS HOLT WINTER

perf_ht2_add_train_usd<-getPerformance(ht2_add_usd$model$fitted,sactnetusd_train)
perf_ht2_multi_train_usd<-getPerformance(ht2_multi_usd$model$fitted,sactnetusd_train)

perf_ht2_add_usd<-getPerformance(pred_ht2_add_usd, sactnetusd_test)
perf_ht2_multi_usd<-getPerformance(pred_ht2_multi_usd, sactnetusd_test)

data.frame(
  Modelo = c("Holt Winter Aditivia",
             "Holt Winter Multiplicativa"),
  AIC = c(ht2_add_usd$model$aic,
          ht2_multi_usd$model$aic),
  AICc= c(ht2_add_usd$model$aicc,
          ht2_multi_usd$model$aicc),
  BIC = c(ht2_add_usd$model$bic,
          ht2_multi_usd$model$bic))%>%
  arrange(AIC)%>%
  knitr::kable(caption="Metricas de Bondad de Ajuste")

MetResHW_usd<-cbind(
  Modelo = c("Holt Winter Aditivia",
             "Holt Winter Multiplicativa",
             "Holt Winter Aditivia",
             "Holt Winter Multiplicativa"),
  Dataset=c("Entrenamiento","Entrenamiento","Prueba","Prueba"),
  rbind(
    perf_ht2_add_train_usd,
    perf_ht2_multi_train_usd,
    perf_ht2_add_usd, 
    perf_ht2_multi_usd
        )
) %>%
  arrange(RMSE)

MetResHW_usd%>%
  knitr::kable(caption="Metricas de Ajuste sobre la tabla de validación")

ARIMA

## 
## Call:
## seas(x = sactnetcrc_train, transform.function = "log", regression.aictest = NULL, 
##     outlier = NULL, regression.variables = "ao2020.Mar", arima.model = "(0 1 0)(1 0 1)")
## 
## Coefficients:
##                Estimate Std. Error z value             Pr(>|z|)    
## AO2020.Mar     -0.26213    0.04651  -5.636   0.0000000173759851 ***
## AR-Seasonal-12  0.93237    0.03143  29.664 < 0.0000000000000002 ***
## MA-Seasonal-12  0.62705    0.08332   7.526   0.0000000000000523 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## SEATS adj.  ARIMA: (0 1 0)(1 0 1)  Obs.: 122  Transform: log
## AICc:  2930, BIC:  2941  QS (no seasonality in final):    0  
## Box-Ljung (no autocorr.): 25.52   Shapiro (normality): 0.9725 **
## 
## Call:
## seas(x = sactnetcrc_train, transform.function = "none", regression.aictest = NULL, 
##     outlier = NULL, regression.variables = "ao2020.Mar", arima.model = "(0 1 0)(0 1 1)")
## 
## Coefficients:
##                     Estimate    Std. Error z value             Pr(>|z|)    
## AO2020.Mar     -200519.71119   29678.44417  -6.756      0.0000000000141 ***
## MA-Seasonal-12       0.78410       0.07922   9.898 < 0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## SEATS adj.  ARIMA: (0 1 0)(0 1 1)  Obs.: 122  Transform: none
## AICc:  2647, BIC:  2655  QS (no seasonality in final):    0  
## Box-Ljung (no autocorr.): 27.41   Shapiro (normality): 0.9743 *
## 
## Call:
## seas(x = sactnetcrc_train, transform.function = "log", regression.aictest = NULL, 
##     outlier = NULL, regression.variables = c("ls2015.May", "ao2020.Mar"), 
##     arima.model = "(2 1 0)(1 0 0)")
## 
## Coefficients:
##                   Estimate Std. Error z value         Pr(>|z|)    
## LS2015.May         0.21302    0.05516   3.861         0.000113 ***
## AO2020.Mar        -0.24242    0.04827  -5.023 0.00000051001064 ***
## AR-Nonseasonal-01 -0.13474    0.08772  -1.536         0.124556    
## AR-Nonseasonal-02 -0.23542    0.08922  -2.639         0.008325 ** 
## AR-Seasonal-12     0.54195    0.07683   7.054 0.00000000000173 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## SEATS adj.  ARIMA: (2 1 0)(1 0 0)  Obs.: 122  Transform: log
## AICc:  2926, BIC:  2942  QS (no seasonality in final):    0  
## Box-Ljung (no autocorr.): 19.38   Shapiro (normality): 0.9827

Análisis de Supuestos

resseas1_usd<-resid(modelseas1_usd)
resseas2_usd<-resid(modelseas2_usd)
resseas3_usd<-resid(modelseas3_usd)
#### Estacionariedad de los residuos

## Media Constante

adf_res_usd_1<- adf.test(resseas1_usd , alternative='stationary')
## Warning in adf.test(resseas1_usd, alternative = "stationary"): p-value smaller
## than printed p-value
adf_res_usd_2<- adf.test(resseas2_usd , alternative='stationary')
## Warning in adf.test(resseas2_usd, alternative = "stationary"): p-value smaller
## than printed p-value
adf_res_usd_3<- adf.test(resseas3_usd , alternative='stationary')
## Warning in adf.test(resseas3_usd, alternative = "stationary"): p-value smaller
## than printed p-value
adf_res_usd_1
## 
##  Augmented Dickey-Fuller Test
## 
## data:  resseas1_usd
## Dickey-Fuller = -4.83, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary
adf_res_usd_2
## 
##  Augmented Dickey-Fuller Test
## 
## data:  resseas2_usd
## Dickey-Fuller = -4.9356, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary
adf_res_usd_3
## 
##  Augmented Dickey-Fuller Test
## 
## data:  resseas3_usd
## Dickey-Fuller = -5.34, Lag order = 4, p-value = 0.01
## alternative hypothesis: stationary
#### Autocorrelaciòn de los resiudos
#####################################
#Autocorrelacion de los residuales y pruebas gráficas
## Ljung-Box test

# H0: Independencia de los residuos
# H1: No Independencia de los residuos

lb_res_usd_1 <- checkresiduals(modelseas1_usd , lag=MaxLag2)
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

lb_res_usd_2 <- checkresiduals(modelseas2_usd , lag=MaxLag2)
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

lb_res_usd_3 <- checkresiduals(modelseas3_usd , lag=MaxLag2)
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

#####################################
#Normalidad de los residuales

# H0: La muestra proviene de una distribución normal.
# H1: La muestra no proviene de una distribución normal.

## Jarque Bera

jb_res_usd_1<-jarque.bera.test(resseas1_usd) # Cumple
jb_res_usd_2<-jarque.bera.test(resseas2_usd) # Cumple
jb_res_usd_3<-jarque.bera.test(resseas3_usd) # Cumple


jb_res_usd_1
## 
##  Jarque Bera Test
## 
## data:  resseas1_usd
## X-squared = 0.54286, df = 2, p-value = 0.7623
jb_res_usd_2
## 
##  Jarque Bera Test
## 
## data:  resseas2_usd
## X-squared = 0.45663, df = 2, p-value = 0.7959
jb_res_usd_3
## 
##  Jarque Bera Test
## 
## data:  resseas3_usd
## X-squared = 1.0719, df = 2, p-value = 0.5851
sw_res_usd_1<-shapiro.test(resseas1_usd) # Cumple
sw_res_usd_2<-shapiro.test(resseas2_usd) # Cumple
sw_res_usd_3<-shapiro.test(resseas3_usd) # Cumple

sw_res_usd_1
## 
##  Shapiro-Wilk normality test
## 
## data:  resseas1_usd
## W = 0.99011, p-value = 0.6078
sw_res_usd_2
## 
##  Shapiro-Wilk normality test
## 
## data:  resseas2_usd
## W = 0.99379, p-value = 0.8693
sw_res_usd_3
## 
##  Shapiro-Wilk normality test
## 
## data:  resseas3_usd
## W = 0.98456, p-value = 0.1791
car::qqPlot(resseas1_usd)

## [1] 44 73
car::qqPlot(resseas2_usd)

## [1] 29 56
car::qqPlot(resseas3_usd)

## [1]  56 113
#### Varianza Constante de los residuos
## Varianza Constante ARCH Engle's Test for Residual Heteroscedasticity
# H0: los residuos son homocedasticos
# H1: los residuos no son homocedasticos

FinTS::ArchTest(resseas1_usd,lag=12)
## 
##  ARCH LM-test; Null hypothesis: no ARCH effects
## 
## data:  resseas1_usd
## Chi-squared = 8.6313, df = 12, p-value = 0.7341
FinTS::ArchTest(resseas2_usd,lag=12)
## 
##  ARCH LM-test; Null hypothesis: no ARCH effects
## 
## data:  resseas2_usd
## Chi-squared = 9.3079, df = 12, p-value = 0.6764
FinTS::ArchTest(resseas3_usd,lag=12)
## 
##  ARCH LM-test; Null hypothesis: no ARCH effects
## 
## data:  resseas3_usd
## Chi-squared = 7.3322, df = 12, p-value = 0.8349
autoplot(resseas1_usd^2 )+ theme_bw(); acf2(resseas1_usd^2, max.lag=MaxLag2)

##       [,1] [,2] [,3]  [,4]  [,5] [,6]  [,7]  [,8]  [,9] [,10] [,11] [,12] [,13]
## ACF  -0.08 0.07 0.09 -0.12 -0.10    0 -0.01 -0.04 -0.10  0.02 -0.10  0.01  0.14
## PACF -0.08 0.07 0.10 -0.12 -0.14    0  0.04 -0.03 -0.15 -0.01 -0.06  0.01  0.12
##      [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25]
## ACF  -0.03  0.15 -0.15 -0.03 -0.04 -0.09 -0.11  0.04 -0.01  0.07 -0.05  0.06
## PACF -0.03  0.11 -0.18 -0.04 -0.03 -0.04 -0.15  0.00  0.04  0.08 -0.07 -0.04
##      [,26] [,27] [,28] [,29] [,30] [,31] [,32] [,33] [,34] [,35] [,36] [,37]
## ACF   0.01  0.10 -0.03  0.07  0.02 -0.06  0.08  0.05 -0.06  0.06 -0.04 -0.09
## PACF  0.03  0.11 -0.11  0.04  0.02  0.02  0.08  0.09 -0.03  0.04 -0.10 -0.05
##      [,38] [,39] [,40] [,41] [,42] [,43] [,44] [,45] [,46] [,47] [,48] [,49]
## ACF  -0.09 -0.03 -0.03 -0.08  0.12 -0.10  0.09  0.06 -0.03  0.04  0.05 -0.09
## PACF -0.11 -0.02 -0.08 -0.04  0.08 -0.02  0.05  0.03 -0.01  0.01  0.00 -0.04
##      [,50] [,51] [,52] [,53] [,54] [,55] [,56] [,57] [,58] [,59] [,60] [,61]
## ACF   0.09 -0.04  0.04 -0.04 -0.08  0.01  0.03  0.14  0.09  0.10 -0.07 -0.09
## PACF  0.08  0.06  0.06  0.00 -0.14 -0.03  0.06  0.10  0.06  0.07 -0.12 -0.06
autoplot(resseas2_usd^2 )+ theme_bw(); acf2(resseas2_usd^2, max.lag=MaxLag2)

##       [,1] [,2] [,3]  [,4]  [,5] [,6]  [,7] [,8]  [,9] [,10] [,11] [,12] [,13]
## ACF  -0.14 0.10 0.03 -0.01 -0.02 0.12 -0.14 0.07 -0.09  0.01  0.09 -0.11  0.03
## PACF -0.14 0.09 0.06 -0.01 -0.03 0.12 -0.10 0.02 -0.07 -0.01  0.11 -0.10  0.02
##      [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25]
## ACF  -0.05  0.00 -0.11 -0.08 -0.07 -0.03 -0.05  0.06 -0.02 -0.01 -0.04  0.08
## PACF -0.06  0.03 -0.13 -0.13 -0.04 -0.06 -0.01  0.02  0.04 -0.01 -0.08  0.08
##      [,26] [,27] [,28] [,29] [,30] [,31] [,32] [,33] [,34] [,35] [,36] [,37]
## ACF  -0.04  0.15 -0.12  0.12 -0.05  0.02 -0.03  0.16 -0.01  0.15 -0.05  0.00
## PACF -0.05  0.16 -0.12  0.09 -0.03 -0.03 -0.03  0.08  0.10  0.06 -0.03 -0.04
##      [,38] [,39] [,40] [,41] [,42] [,43] [,44] [,45] [,46] [,47] [,48] [,49]
## ACF   0.03  0.00 -0.06 -0.03  0.02 -0.09  0.05 -0.10     0 -0.11  0.09 -0.12
## PACF  0.02  0.03 -0.13 -0.02  0.05 -0.03  0.00 -0.08     0 -0.11  0.06 -0.08
##      [,50] [,51] [,52] [,53] [,54] [,55] [,56] [,57] [,58] [,59] [,60] [,61]
## ACF   0.01 -0.06  0.06 -0.06  0.04 -0.05  0.12  0.02 -0.06  0.14 -0.05 -0.05
## PACF -0.01  0.05  0.03  0.01 -0.06  0.01  0.07  0.08 -0.10  0.11 -0.07 -0.08
autoplot(resseas3_usd^2 )+ theme_bw(); acf2(resseas3_usd^2, max.lag=MaxLag2)

##       [,1] [,2] [,3]  [,4]  [,5] [,6]  [,7]  [,8]  [,9] [,10] [,11] [,12] [,13]
## ACF  -0.12 0.12 0.00 -0.04 -0.08 0.03 -0.06  0.01 -0.04  0.01  0.04 -0.12  0.11
## PACF -0.12 0.11 0.03 -0.06 -0.09 0.03 -0.03 -0.01 -0.04  0.00  0.05 -0.13  0.08
##      [,14] [,15] [,16] [,17] [,18] [,19] [,20] [,21] [,22] [,23] [,24] [,25]
## ACF  -0.11  0.05 -0.09 -0.06 -0.05 -0.02 -0.07  0.06  0.05  0.04 -0.07  0.11
## PACF -0.07  0.03 -0.09 -0.10 -0.03 -0.03 -0.05  0.01  0.08  0.04 -0.12  0.11
##      [,26] [,27] [,28] [,29] [,30] [,31] [,32] [,33] [,34] [,35] [,36] [,37]
## ACF  -0.01  0.08 -0.04  0.13 -0.06  0.03  0.04  0.12  0.00  0.13     0 -0.03
## PACF  0.01  0.11 -0.08  0.12 -0.01  0.00  0.05  0.14  0.04  0.10     0  0.02
##      [,38] [,39] [,40] [,41] [,42] [,43] [,44] [,45] [,46] [,47] [,48] [,49]
## ACF   0.00 -0.05 -0.04 -0.06  0.06 -0.10  0.03 -0.11  0.00 -0.12  0.07 -0.15
## PACF -0.01  0.04 -0.08  0.02  0.05 -0.04 -0.03 -0.06 -0.01 -0.09 -0.01 -0.09
##      [,50] [,51] [,52] [,53] [,54] [,55] [,56] [,57] [,58] [,59] [,60] [,61]
## ACF   0.04 -0.06  0.07 -0.08  0.03 -0.01  0.05  0.17 -0.01  0.15 -0.08  0.02
## PACF -0.01 -0.03  0.03 -0.05 -0.06 -0.04  0.03  0.16 -0.04  0.13 -0.09 -0.03
### Performance de los modelos

pronostico_usd_1 <- window(series(modelseas1_usd,"forecast.forecasts"),start=inicio_test,end=c(2021,7))     
## specs have been added to the model: forecast
pronostico_usd_2 <- window(series(modelseas2_usd,"forecast.forecasts"),start=inicio_test,end=c(2021,7))     
## specs have been added to the model: forecast
pronostico_usd_3 <- window(series(modelseas3_usd,"forecast.forecasts"),start=inicio_test,end=c(2021,7))     
## specs have been added to the model: forecast
pronostico_usd_1_train  <- final(modelseas1_usd)            
pronostico_usd_2_train  <- final(modelseas2_usd)                    
pronostico_usd_3_train  <- final(modelseas3_usd)        
perfor_usd_train_mod_1 <- getPerformance(pronostico_usd_1_train, sactnetusd_train)
perfor_usd_train_mod_2 <- getPerformance(pronostico_usd_2_train, sactnetusd_train)
perfor_usd_train_mod_3 <- getPerformance(pronostico_usd_3_train, sactnetusd_train)


perfor_usd_test_mod_1 <- getPerformance(pronostico_usd_1[,1],sactnetusd_test)
perfor_usd_test_mod_2 <- getPerformance(pronostico_usd_2[,1],sactnetusd_test)
perfor_usd_test_mod_3 <- getPerformance(pronostico_usd_3[,1],sactnetusd_test)
data.frame(
  Models=c(
      "1. ARIMA (0 1 0)(1 0 1) Log",
      "2. ARIMA (0 1 0)(0 1 1) Niveles",
      "3. ARIMA (2 1 0)(1 0 0) Log"
    ),
AIC=c(AIC(modelseas1_usd),AIC(modelseas2_usd),AIC(modelseas3_usd)),
BIC=c(BIC(modelseas1_usd),BIC(modelseas2_usd),BIC(modelseas3_usd))
)%>%
  arrange(AIC)%>%
  knitr::kable(caption="Medidas de Ajuste: ARIMA Entrenamiento")
Medidas de Ajuste: ARIMA Entrenamiento
Models AIC BIC
3. ARIMA (2 1 0)(1 0 0) Log 1272.902 1280.976
2. ARIMA (0 1 0)(0 1 1) Niveles 1282.902 1290.976
1. ARIMA (0 1 0)(1 0 1) Log 1299.884 1307.958
Metricas_Sarima_usd <- data.frame(
Modelo = rep(
c(
"1.ARIMA (0 1 1)(1 1 0) Niveles",
"2.ARIMA (0 1 1)(0 1 1) Log",
"3.ARIMA (0 1 1)(0 1 1) Niveles"
),
2
),
Dataset = c(rep("Entrenamiento", 3), rep("Prueba", 3)),
rbind(
perfor_usd_train_mod_1,
perfor_usd_train_mod_2,
perfor_usd_train_mod_3,
perfor_usd_test_mod_1,
perfor_usd_test_mod_2,
perfor_usd_test_mod_3
)
)

#Metricas_Mod_Lin<- bind_rows(Metricas_HW_usd,Metricas_Sarima_usd)
Metricas_Mod_Lin<- Metricas_Sarima_usd

Metricas_Mod_Lin%>%
  knitr::kable()
Modelo Dataset MAE RSS MSE RMSE
1.ARIMA (0 1 1)(1 1 0) Niveles Entrenamiento 44.26093 360934.60 2958.480 54.39191
2.ARIMA (0 1 1)(0 1 1) Log Entrenamiento 40.61693 312328.78 2560.072 50.59715
3.ARIMA (0 1 1)(0 1 1) Niveles Entrenamiento 38.43133 255606.93 2095.139 45.77269
1.ARIMA (0 1 1)(1 1 0) Niveles Prueba 35.96765 12560.82 2512.163 50.12148
2.ARIMA (0 1 1)(0 1 1) Log Prueba 38.93622 10896.37 2179.274 46.68270
3.ARIMA (0 1 1)(0 1 1) Niveles Prueba 62.33028 22993.34 4598.669 67.81348

Métricas de Rendimiento Modelos Lineales

rbind(Metricas_Sarima_usd,
MetResHW_usd)%>%
  arrange(Dataset,MAE)%>%
  knitr::kable(caption="Metricas de Rendimiento sobre la muestra de prueba")
Metricas de Rendimiento sobre la muestra de prueba
Modelo Dataset MAE RSS MSE RMSE
3.ARIMA (0 1 1)(0 1 1) Niveles Entrenamiento 38.43133 255606.93 2095.139 45.77269
2.ARIMA (0 1 1)(0 1 1) Log Entrenamiento 40.61693 312328.78 2560.072 50.59715
1.ARIMA (0 1 1)(1 1 0) Niveles Entrenamiento 44.26093 360934.60 2958.480 54.39191
Holt Winter Aditivia Entrenamiento 53.54778 563335.58 4617.505 67.95222
Holt Winter Multiplicativa Entrenamiento 54.89003 598013.39 4901.749 70.01249
1.ARIMA (0 1 1)(1 1 0) Niveles Prueba 35.96765 12560.82 2512.163 50.12148
2.ARIMA (0 1 1)(0 1 1) Log Prueba 38.93622 10896.37 2179.274 46.68270
Holt Winter Aditivia Prueba 53.12659 16432.39 3286.479 57.32782
Holt Winter Multiplicativa Prueba 54.05022 18804.05 3760.810 61.32544
3.ARIMA (0 1 1)(0 1 1) Niveles Prueba 62.33028 22993.34 4598.669 67.81348
### Holt-Winter MULTIPLICATUVI
# ht2_multi_all <- hw(sactnetusd,seasonal="multiplicative",h = 5)
# autoplot(ht2_multi_all)+
#   theme_bw()

modelseas2_all<-seas(
x = sactnetusd,
transform.function = "log",
regression.aictest = NULL,
outlier = NULL,
arima.model = "(0 1 1)(0 1 1)"
)


autoplot(sactnetusd)+
  autolayer(window(series(modelseas2_all,"forecast.forecasts"),start=c(2021,8),end=c(2021,12)))+
  theme_bw()
## specs have been added to the model: forecast
## For a multivariate time series, specify a seriesname for each time series. Defaulting to column names.

3.2.2. Modelo No Lineal

Serie en Colones

TAR

Definir Parametros modelo TAR

# m orden
pm <- 1:3

mod.list.tar<-list()
AIC.best.list<-list()

AICM = NULL
model.best <- list(d=0, p1=0, p2=0)
AIC.best = 2964

for(l in pm){
  for(j in pm){
    for(i in pm){
      set.seed(777)
      model.tar.s = tar(sactnetcrc_train,p1=j,p2=i,d=l)
      mod.list.tar[[paste(j,i,l,sep="-")]]<-model.tar.s$AIC
      #print(paste(j,i,l,model.tar.s$AIC,sep="-"))    
      
      if (model.tar.s$AIC < AIC.best) {
            AIC.best = model.tar.s$AIC
            AIC.best.list[[paste(j,i,l,sep="-")]]<-AIC.best
            #print(AIC.best)
            model.best$d = l
            model.best$p1 = model.tar.s$p1
            model.best$p2 = model.tar.s$p2 
            print(paste(model.tar.s$p1,model.tar.s$p2,l,sep="-")) }
    }
  }
}
## [1] "0-3-1"
## [1] "3-1-1"
## [1] "3-3-1"
## [1] "3-1-2"
## [1] "3-2-2"
# AICTar<-bind_rows(mod.list.tar,.id = "Ordene-delay")%>%
#   arrange(`1`)
# 
# knitr::kable(head(AICTar,20))

AICTarBest<-bind_rows(AIC.best.list,.id = "Ordene-delay")%>%
  arrange(`1`)

knitr::kable(head(AICTarBest,20))
Ordene-delay 1
3-2-2 2924
3-1-2 2925
3-3-1 2929
3-1-1 2930
1-3-1 2936

Los tres mejores modelos

mod.tar1<-TSA::tar(sactnetcrc_train,p1=3,p2=2,d=1)  
mod.tar2<-TSA::tar(sactnetcrc_train,p1=3,p2=1,d=2)  
mod.tar3<-TSA::tar(sactnetcrc_train,p1=3,p2=3,d=1)  

mod.tar1$thd
##          
## 634783.1
mod.tar2$thd
##          
## 733818.8
mod.tar3$thd
##          
## 511224.5
mod.tar1$qr1$coefficients
## intercept-sactnetcrc_train      lag1-sactnetcrc_train 
##              21868.4352666                  0.7499834 
##      lag2-sactnetcrc_train      lag3-sactnetcrc_train 
##                 -0.3430847                  0.5609114
mod.tar2$qr1$coefficients
## intercept-sactnetcrc_train      lag1-sactnetcrc_train 
##              15867.9606105                  0.8874530 
##      lag2-sactnetcrc_train      lag3-sactnetcrc_train 
##                 -0.2851389                  0.3824949
mod.tar3$qr1$coefficients
## intercept-sactnetcrc_train      lag1-sactnetcrc_train 
##              38543.7938885                  0.6614203 
##      lag2-sactnetcrc_train      lag3-sactnetcrc_train 
##                 -0.7022195                  0.9513138
mod.tar1$qr2$coefficients
## intercept-sactnetcrc_train      lag1-sactnetcrc_train 
##               81703.567757                   0.897547
mod.tar2$qr2$coefficients
## intercept-sactnetcrc_train      lag1-sactnetcrc_train 
##              88446.1214206                  0.8892529
mod.tar3$qr2$coefficients
## intercept-sactnetcrc_train      lag1-sactnetcrc_train 
##             34728.59159827                 0.78818409 
##      lag2-sactnetcrc_train      lag3-sactnetcrc_train 
##                -0.05905747                 0.23115235
cbind(
Modelo=c("1.TAR p1=3,p2=2,d=1",
         "2.TAR p1=3,p2=1,d=2",
         "3.TAR p1=3,p2=3,d=1"),
AIC=c(mod.tar1$AIC,
mod.tar2$AIC,
mod.tar3$AIC))%>%
  knitr::kable()
Modelo AIC
1 1.TAR p1=3,p2=2,d=1 2930
1 2.TAR p1=3,p2=1,d=2 2925
1 3.TAR p1=3,p2=3,d=1 2929
#tsdiag(mod.tar1)
tsdiag(mod.tar2)

#tsdiag(mod.tar3)


checkresiduals(ts(mod.tar1$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

checkresiduals(ts(mod.tar2$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

checkresiduals(ts(mod.tar3$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

prontar1<- ts(as.vector(predict(mod.tar1,n.ahead=h.param,n.sim=1000)$fit),start=inicio_test,frequency = 12)
prontar2<- ts(as.vector(predict(mod.tar2,n.ahead=h.param,n.sim=1000)$fit),start=inicio_test,frequency = 12)
prontar3<- ts(as.vector(predict(mod.tar3,n.ahead=h.param,n.sim=1000)$fit),start=inicio_test,frequency = 12)

fit1<-ts(as.vector(mod.tar1$y)-as.vector(mod.tar1$residuals),start =inicio_train,frequency = 12)
## Warning in as.vector(mod.tar1$y) - as.vector(mod.tar1$residuals): longer object
## length is not a multiple of shorter object length
fit2<-ts(sactnetcrc_train-mod.tar2$residuals,start =inicio_train,frequency = 12)
## Warning in `-.default`(sactnetcrc_train, mod.tar2$residuals): longer object
## length is not a multiple of shorter object length
fit3<-ts(sactnetcrc_train-mod.tar3$residuals,start =inicio_train,frequency = 12)
## Warning in `-.default`(sactnetcrc_train, mod.tar3$residuals): longer object
## length is not a multiple of shorter object length
MetricasTARCRC<-data.frame(
  Modelo=rep(c("1.TAR p1=3,p2=2,d=1",
         "2.TAR p1=3,p2=1,d=2",
         "3.TAR p1=3,p2=3,d=1"),2),
DataSet= c(rep("Entrenamiento",3),rep("Prueba",3)),

rbind(getPerformance(fit1,sactnetcrc_train),
getPerformance(fit2,sactnetcrc_train),
getPerformance(fit3,sactnetcrc_train),

getPerformance(prontar1,sactnetcrc_test),
getPerformance(prontar2,sactnetcrc_test),
getPerformance(prontar3,sactnetcrc_test)))%>%
  arrange(DataSet,RMSE)

MetricasTARCRC%>%
  knitr::kable(caption="Metricas de Rendimiento Modelos TAR")
Metricas de Rendimiento Modelos TAR
Modelo DataSet MAE RSS MSE RMSE
3.TAR p1=3,p2=3,d=1 Entrenamiento 39772.97 328942351055 2696248779 51925.42
1.TAR p1=3,p2=2,d=1 Entrenamiento 40308.12 338913405336 2777978732 52706.53
2.TAR p1=3,p2=1,d=2 Entrenamiento 40514.30 351547834112 2881539624 53679.97
3.TAR p1=3,p2=3,d=1 Prueba 59009.97 30898688005 6179737601 78611.31
1.TAR p1=3,p2=2,d=1 Prueba 84725.95 62293400375 12458680075 111618.46
2.TAR p1=3,p2=1,d=2 Prueba 87059.11 64393263703 12878652741 113484.15
autoplot(sactnetcrc_train)+
  autolayer(fit1)+
  autolayer(fit2)+
  autolayer(fit3)+
  theme_bw()

autoplot(sactnetcrc_test)+
  autolayer(prontar1)+
  autolayer(prontar2)+
  autolayer(prontar3)+
  theme_bw()+
  scale_y_continuous(limits = c(500000,1400000))

SETAR

Thus the threshold delay, the number of lags in each regime and the threshold value are computed.

Setar1 <-
  selectSETAR(
    sactnetcrc_train, 
    include = c("const", "trend","none", "both"),
    m = 3,
    thDelay = seq(1, 2, by = 1),
    nthresh = 2,
    criterion = "AIC",
    type = "level",
    plot = T,
    trace = T
  )
## Using maximum autoregressive order for low regime: mL = 3 
## Using maximum autoregressive order for high regime: mH = 3 
## Using maximum autoregressive order for middle regime: mM = 3 
## Searching on 83 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on  1494  combinations of thresholds ( 83 ), thDelay ( 2 ), mL ( 3 ) and MM ( 3 ) 
## 
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5Result of the one threshold search:
##  -Thresh:  778351.5  -Delay:  2  -mL: 3  -mH: 1  - AIC 2664.942 
## 
## Trim not respected:  0.1680672 0.697479 0.1344538 from 416644.8 778351.5
## Trim not respected:  0.1764706 0.6890756 0.1344538 from 418187.9 778351.5
## Trim not respected:  0.1848739 0.6806723 0.1344538 from 422751.8 778351.5
## Trim not respected:  0.1932773 0.6722689 0.1344538 from 438446.4 778351.5
## Trim not respected:  0.210084 0.6554622 0.1344538 from 470793.3 778351.5
## Trim not respected:  0.2184874 0.6470588 0.1344538 from 492453.3 778351.5
## Trim not respected:  0.2268908 0.6386555 0.1344538 from 496086.7 778351.5
## Trim not respected:  0.2352941 0.6302521 0.1344538 from 499552.3 778351.5
## Trim not respected:  0.2436975 0.6218487 0.1344538 from 510928.7 778351.5
## Trim not respected:  0.2521008 0.6134454 0.1344538 from 511224.5 778351.5
## Trim not respected:  0.2605042 0.605042 0.1344538 from 511791.8 778351.5
## Trim not respected:  0.2689076 0.5966387 0.1344538 from 511895.9 778351.5
## Trim not respected:  0.2773109 0.5882353 0.1344538 from 512476.7 778351.5
## Trim not respected:  0.2857143 0.5798319 0.1344538 from 521049.7 778351.5
## Trim not respected:  0.2941176 0.5714286 0.1344538 from 523480.9 778351.5
## Trim not respected:  0.302521 0.5630252 0.1344538 from 528105.4 778351.5
## Trim not respected:  0.3109244 0.5546218 0.1344538 from 529852.4 778351.5
## Trim not respected:  0.3193277 0.5462185 0.1344538 from 530010.3 778351.5
## Trim not respected:  0.3277311 0.5378151 0.1344538 from 542167.5 778351.5
## Trim not respected:  0.3361345 0.5294118 0.1344538 from 544430 778351.5
## Trim not respected:  0.3445378 0.5210084 0.1344538 from 546553.9 778351.5
## Trim not respected:  0.3529412 0.512605 0.1344538 from 550118.6 778351.5
## Trim not respected:  0.3613445 0.5042017 0.1344538 from 557325.5 778351.5
## Trim not respected:  0.3697479 0.4957983 0.1344538 from 561437.7 778351.5
## Trim not respected:  0.3781513 0.487395 0.1344538 from 561724.3 778351.5
## Trim not respected:  0.3865546 0.4789916 0.1344538 from 562865.8 778351.5
## Trim not respected:  0.394958 0.4705882 0.1344538 from 581770.8 778351.5
## Trim not respected:  0.4033613 0.4621849 0.1344538 from 582916.3 778351.5
## Trim not respected:  0.4117647 0.4537815 0.1344538 from 596791 778351.5
## Trim not respected:  0.4201681 0.4453782 0.1344538 from 600365 778351.5
## Trim not respected:  0.4285714 0.4369748 0.1344538 from 615605.6 778351.5
## Trim not respected:  0.4369748 0.4285714 0.1344538 from 620337.8 778351.5
## Trim not respected:  0.4453782 0.4201681 0.1344538 from 621904.5 778351.5
## Trim not respected:  0.4537815 0.4117647 0.1344538 from 622250.1 778351.5
## Trim not respected:  0.4621849 0.4033613 0.1344538 from 629474.1 778351.5
## Trim not respected:  0.4705882 0.394958 0.1344538 from 634783.1 778351.5
## Trim not respected:  0.4789916 0.3865546 0.1344538 from 639380.1 778351.5
## Trim not respected:  0.487395 0.3781513 0.1344538 from 641543.7 778351.5
## Trim not respected:  0.4957983 0.3697479 0.1344538 from 643093.6 778351.5
## Trim not respected:  0.5042017 0.3613445 0.1344538 from 648906.5 778351.5
## Trim not respected:  0.512605 0.3529412 0.1344538 from 654757 778351.5
## Trim not respected:  0.5210084 0.3445378 0.1344538 from 666744 778351.5
## Trim not respected:  0.5294118 0.3361345 0.1344538 from 682393.1 778351.5
## Trim not respected:  0.5378151 0.3277311 0.1344538 from 685707.6 778351.5
## Trim not respected:  0.5462185 0.3193277 0.1344538 from 687379.9 778351.5
## Trim not respected:  0.5546218 0.3109244 0.1344538 from 687628.7 778351.5
## Trim not respected:  0.5630252 0.302521 0.1344538 from 689025.3 778351.5
## Trim not respected:  0.5714286 0.2941176 0.1344538 from 694178.9 778351.5
## Trim not respected:  0.5798319 0.2857143 0.1344538 from 695528 778351.5
## Trim not respected:  0.5882353 0.2773109 0.1344538 from 695643.6 778351.5
## Trim not respected:  0.5966387 0.2689076 0.1344538 from 699826.5 778351.5
## Trim not respected:  0.605042 0.2605042 0.1344538 from 701399.4 778351.5
## Trim not respected:  0.6134454 0.2521008 0.1344538 from 701843.8 778351.5
## Trim not respected:  0.6218487 0.2436975 0.1344538 from 702153.4 778351.5
## Trim not respected:  0.6302521 0.2352941 0.1344538 from 703858 778351.5
## Trim not respected:  0.6386555 0.2268908 0.1344538 from 705830 778351.5
## Trim not respected:  0.6470588 0.2184874 0.1344538 from 708095.7 778351.5
## Trim not respected:  0.6554622 0.210084 0.1344538 from 713654.1 778351.5
## Trim not respected:  0.6638655 0.2016807 0.1344538 from 717484.9 778351.5
## Trim not respected:  0.6722689 0.1932773 0.1344538 from 718458 778351.5
## Trim not respected:  0.6806723 0.1848739 0.1344538 from 721993.2 778351.5
## Trim not respected:  0.6890756 0.1764706 0.1344538 from 722997.2 778351.5
## Trim not respected:  0.697479 0.1680672 0.1344538 from 723361.2 778351.5
## Trim not respected:  0.7058824 0.1596639 0.1344538 from 725680.7 778351.5
## Trim not respected:  0.7142857 0.1512605 0.1344538 from 726081.6 778351.5Second best: 705830 (conditionnal on th= 778351.5 and Delay= 2 )     SSR/AIC: 2662.995
## 
## Trim not respected:  0.6386555 0.2268908 0.1344538 from 705830 778351.5Second best: 778351.5 (conditionnal on th= 705830 and Delay= 2 )   SSR/AIC: 2662.995
## 
## Trim not respected:  0.1680672 0.697479 0.1344538 from 416644.8 778351.5
## Trim not respected:  0.1764706 0.6890756 0.1344538 from 418187.9 778351.5
## Trim not respected:  0.1848739 0.6806723 0.1344538 from 422751.8 778351.5
## Trim not respected:  0.1932773 0.6722689 0.1344538 from 438446.4 778351.5
## Trim not respected:  0.210084 0.6554622 0.1344538 from 470793.3 778351.5
## Trim not respected:  0.2184874 0.6470588 0.1344538 from 492453.3 778351.5
## Trim not respected:  0.2268908 0.6386555 0.1344538 from 496086.7 778351.5
## Trim not respected:  0.2352941 0.6302521 0.1344538 from 499552.3 778351.5
## Trim not respected:  0.2436975 0.6218487 0.1344538 from 510928.7 778351.5
## Trim not respected:  0.2521008 0.6134454 0.1344538 from 511224.5 778351.5
## Trim not respected:  0.2605042 0.605042 0.1344538 from 511791.8 778351.5
## Trim not respected:  0.2689076 0.5966387 0.1344538 from 511895.9 778351.5
## Trim not respected:  0.2773109 0.5882353 0.1344538 from 512476.7 778351.5
## Trim not respected:  0.2857143 0.5798319 0.1344538 from 521049.7 778351.5
## Trim not respected:  0.2941176 0.5714286 0.1344538 from 523480.9 778351.5
## Trim not respected:  0.302521 0.5630252 0.1344538 from 528105.4 778351.5
## Trim not respected:  0.3109244 0.5546218 0.1344538 from 529852.4 778351.5
## Trim not respected:  0.3193277 0.5462185 0.1344538 from 530010.3 778351.5
## Trim not respected:  0.3277311 0.5378151 0.1344538 from 542167.5 778351.5
## Trim not respected:  0.3361345 0.5294118 0.1344538 from 544430 778351.5
## Trim not respected:  0.3445378 0.5210084 0.1344538 from 546553.9 778351.5
## Trim not respected:  0.3529412 0.512605 0.1344538 from 550118.6 778351.5
## Trim not respected:  0.3613445 0.5042017 0.1344538 from 557325.5 778351.5
## Trim not respected:  0.3697479 0.4957983 0.1344538 from 561437.7 778351.5
## Trim not respected:  0.3781513 0.487395 0.1344538 from 561724.3 778351.5
## Trim not respected:  0.3865546 0.4789916 0.1344538 from 562865.8 778351.5
## Trim not respected:  0.394958 0.4705882 0.1344538 from 581770.8 778351.5
## Trim not respected:  0.4033613 0.4621849 0.1344538 from 582916.3 778351.5
## Trim not respected:  0.4117647 0.4537815 0.1344538 from 596791 778351.5
## Trim not respected:  0.4201681 0.4453782 0.1344538 from 600365 778351.5
## Trim not respected:  0.4285714 0.4369748 0.1344538 from 615605.6 778351.5
## Trim not respected:  0.4369748 0.4285714 0.1344538 from 620337.8 778351.5
## Trim not respected:  0.4453782 0.4201681 0.1344538 from 621904.5 778351.5
## Trim not respected:  0.4537815 0.4117647 0.1344538 from 622250.1 778351.5
## Trim not respected:  0.4621849 0.4033613 0.1344538 from 629474.1 778351.5
## Trim not respected:  0.4705882 0.394958 0.1344538 from 634783.1 778351.5
## Trim not respected:  0.4789916 0.3865546 0.1344538 from 639380.1 778351.5
## Trim not respected:  0.487395 0.3781513 0.1344538 from 641543.7 778351.5
## Trim not respected:  0.4957983 0.3697479 0.1344538 from 643093.6 778351.5
## Trim not respected:  0.5042017 0.3613445 0.1344538 from 648906.5 778351.5
## Trim not respected:  0.512605 0.3529412 0.1344538 from 654757 778351.5
## Trim not respected:  0.5210084 0.3445378 0.1344538 from 666744 778351.5
## Trim not respected:  0.5294118 0.3361345 0.1344538 from 682393.1 778351.5
## Trim not respected:  0.5378151 0.3277311 0.1344538 from 685707.6 778351.5
## Trim not respected:  0.5462185 0.3193277 0.1344538 from 687379.9 778351.5
## Trim not respected:  0.5546218 0.3109244 0.1344538 from 687628.7 778351.5
## Trim not respected:  0.5630252 0.302521 0.1344538 from 689025.3 778351.5
## Trim not respected:  0.5714286 0.2941176 0.1344538 from 694178.9 778351.5
## Trim not respected:  0.5798319 0.2857143 0.1344538 from 695528 778351.5
## Trim not respected:  0.5882353 0.2773109 0.1344538 from 695643.6 778351.5
## Trim not respected:  0.5966387 0.2689076 0.1344538 from 699826.5 778351.5
## Trim not respected:  0.605042 0.2605042 0.1344538 from 701399.4 778351.5
## Trim not respected:  0.6134454 0.2521008 0.1344538 from 701843.8 778351.5
## Trim not respected:  0.6218487 0.2436975 0.1344538 from 702153.4 778351.5
## Trim not respected:  0.6302521 0.2352941 0.1344538 from 703858 778351.5
## Trim not respected:  0.6386555 0.2268908 0.1344538 from 705830 778351.5
## Trim not respected:  0.6470588 0.2184874 0.1344538 from 708095.7 778351.5
## Trim not respected:  0.6554622 0.210084 0.1344538 from 713654.1 778351.5
## Trim not respected:  0.6638655 0.2016807 0.1344538 from 717484.9 778351.5
## Trim not respected:  0.6722689 0.1932773 0.1344538 from 718458 778351.5
## Trim not respected:  0.6806723 0.1848739 0.1344538 from 721993.2 778351.5
## Trim not respected:  0.6890756 0.1764706 0.1344538 from 722997.2 778351.5
## Trim not respected:  0.697479 0.1680672 0.1344538 from 723361.2 778351.5
## Trim not respected:  0.7058824 0.1596639 0.1344538 from 725680.7 778351.5
## Trim not respected:  0.7142857 0.1512605 0.1344538 from 726081.6 778351.5Second best: 705830 (conditionnal on th= 778351.5 and Delay= 2 )     SSR/AIC: 2660.377
## 
## Trim not respected:  0.6386555 0.2268908 0.1344538 from 705830 778351.5Second best: 771089.2 (conditionnal on th= 705830 and Delay= 2 )   SSR/AIC: 2660.162
## 
## Trim not respected:  0.1680672 0.697479 0.1344538 from 416644.8 778351.5
## Trim not respected:  0.1764706 0.6890756 0.1344538 from 418187.9 778351.5
## Trim not respected:  0.1848739 0.6806723 0.1344538 from 422751.8 778351.5
## Trim not respected:  0.1932773 0.6722689 0.1344538 from 438446.4 778351.5
## Trim not respected:  0.210084 0.6554622 0.1344538 from 470793.3 778351.5
## Trim not respected:  0.2184874 0.6470588 0.1344538 from 492453.3 778351.5
## Trim not respected:  0.2268908 0.6386555 0.1344538 from 496086.7 778351.5
## Trim not respected:  0.2352941 0.6302521 0.1344538 from 499552.3 778351.5
## Trim not respected:  0.2436975 0.6218487 0.1344538 from 510928.7 778351.5
## Trim not respected:  0.2521008 0.6134454 0.1344538 from 511224.5 778351.5
## Trim not respected:  0.2605042 0.605042 0.1344538 from 511791.8 778351.5
## Trim not respected:  0.2689076 0.5966387 0.1344538 from 511895.9 778351.5
## Trim not respected:  0.2773109 0.5882353 0.1344538 from 512476.7 778351.5
## Trim not respected:  0.2857143 0.5798319 0.1344538 from 521049.7 778351.5
## Trim not respected:  0.2941176 0.5714286 0.1344538 from 523480.9 778351.5
## Trim not respected:  0.302521 0.5630252 0.1344538 from 528105.4 778351.5
## Trim not respected:  0.3109244 0.5546218 0.1344538 from 529852.4 778351.5
## Trim not respected:  0.3193277 0.5462185 0.1344538 from 530010.3 778351.5
## Trim not respected:  0.3277311 0.5378151 0.1344538 from 542167.5 778351.5
## Trim not respected:  0.3361345 0.5294118 0.1344538 from 544430 778351.5
## Trim not respected:  0.3445378 0.5210084 0.1344538 from 546553.9 778351.5
## Trim not respected:  0.3529412 0.512605 0.1344538 from 550118.6 778351.5
## Trim not respected:  0.3613445 0.5042017 0.1344538 from 557325.5 778351.5
## Trim not respected:  0.3697479 0.4957983 0.1344538 from 561437.7 778351.5
## Trim not respected:  0.3781513 0.487395 0.1344538 from 561724.3 778351.5
## Trim not respected:  0.3865546 0.4789916 0.1344538 from 562865.8 778351.5
## Trim not respected:  0.394958 0.4705882 0.1344538 from 581770.8 778351.5
## Trim not respected:  0.4033613 0.4621849 0.1344538 from 582916.3 778351.5
## Trim not respected:  0.4117647 0.4537815 0.1344538 from 596791 778351.5
## Trim not respected:  0.4201681 0.4453782 0.1344538 from 600365 778351.5
## Trim not respected:  0.4285714 0.4369748 0.1344538 from 615605.6 778351.5
## Trim not respected:  0.4369748 0.4285714 0.1344538 from 620337.8 778351.5
## Trim not respected:  0.4453782 0.4201681 0.1344538 from 621904.5 778351.5
## Trim not respected:  0.4537815 0.4117647 0.1344538 from 622250.1 778351.5
## Trim not respected:  0.4621849 0.4033613 0.1344538 from 629474.1 778351.5
## Trim not respected:  0.4705882 0.394958 0.1344538 from 634783.1 778351.5
## Trim not respected:  0.4789916 0.3865546 0.1344538 from 639380.1 778351.5
## Trim not respected:  0.487395 0.3781513 0.1344538 from 641543.7 778351.5
## Trim not respected:  0.4957983 0.3697479 0.1344538 from 643093.6 778351.5
## Trim not respected:  0.5042017 0.3613445 0.1344538 from 648906.5 778351.5
## Trim not respected:  0.512605 0.3529412 0.1344538 from 654757 778351.5
## Trim not respected:  0.5210084 0.3445378 0.1344538 from 666744 778351.5
## Trim not respected:  0.5294118 0.3361345 0.1344538 from 682393.1 778351.5
## Trim not respected:  0.5378151 0.3277311 0.1344538 from 685707.6 778351.5
## Trim not respected:  0.5462185 0.3193277 0.1344538 from 687379.9 778351.5
## Trim not respected:  0.5546218 0.3109244 0.1344538 from 687628.7 778351.5
## Trim not respected:  0.5630252 0.302521 0.1344538 from 689025.3 778351.5
## Trim not respected:  0.5714286 0.2941176 0.1344538 from 694178.9 778351.5
## Trim not respected:  0.5798319 0.2857143 0.1344538 from 695528 778351.5
## Trim not respected:  0.5882353 0.2773109 0.1344538 from 695643.6 778351.5
## Trim not respected:  0.5966387 0.2689076 0.1344538 from 699826.5 778351.5
## Trim not respected:  0.605042 0.2605042 0.1344538 from 701399.4 778351.5
## Trim not respected:  0.6134454 0.2521008 0.1344538 from 701843.8 778351.5
## Trim not respected:  0.6218487 0.2436975 0.1344538 from 702153.4 778351.5
## Trim not respected:  0.6302521 0.2352941 0.1344538 from 703858 778351.5
## Trim not respected:  0.6386555 0.2268908 0.1344538 from 705830 778351.5
## Trim not respected:  0.6470588 0.2184874 0.1344538 from 708095.7 778351.5
## Trim not respected:  0.6554622 0.210084 0.1344538 from 713654.1 778351.5
## Trim not respected:  0.6638655 0.2016807 0.1344538 from 717484.9 778351.5
## Trim not respected:  0.6722689 0.1932773 0.1344538 from 718458 778351.5
## Trim not respected:  0.6806723 0.1848739 0.1344538 from 721993.2 778351.5
## Trim not respected:  0.6890756 0.1764706 0.1344538 from 722997.2 778351.5
## Trim not respected:  0.697479 0.1680672 0.1344538 from 723361.2 778351.5
## Trim not respected:  0.7058824 0.1596639 0.1344538 from 725680.7 778351.5
## Trim not respected:  0.7142857 0.1512605 0.1344538 from 726081.6 778351.5Second best: 705830 (conditionnal on th= 778351.5 and Delay= 2 )     SSR/AIC: 2662.311
## 
## Trim not respected:  0.6386555 0.2268908 0.1344538 from 705830 778351.5Second best: 771089.2 (conditionnal on th= 705830 and Delay= 2 )   SSR/AIC: 2661.953

Setar2 <-
  selectSETAR(
    sactnetcrc_train,
    m = 3,
    d=2,
    thDelay = seq(1, 2, by = 1),
    nthresh = 2,
    criterion = "AIC",
    type = "level",
    plot = T,
    trace = T
  )
## Using maximum autoregressive order for low regime: mL = 3 
## Using maximum autoregressive order for high regime: mH = 3 
## Using maximum autoregressive order for middle regime: mM = 3 
## Searching on 80 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on  1440  combinations of thresholds ( 80 ), thDelay ( 2 ), mL ( 3 ) and MM ( 3 ) 
## 
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2Result of the one threshold search:
##  -Thresh:  771089.2  -Delay:  2  -mL: 2  -mH: 1  - AIC 2714.372 
## 
## Trim not respected:  0.1896552 0.6896552 0.1206897 from 422751.8 771089.2
## Trim not respected:  0.1982759 0.6810345 0.1206897 from 438446.4 771089.2
## Trim not respected:  0.2155172 0.6637931 0.1206897 from 470793.3 771089.2
## Trim not respected:  0.2241379 0.6551724 0.1206897 from 492453.3 771089.2
## Trim not respected:  0.2327586 0.6465517 0.1206897 from 496086.7 771089.2
## Trim not respected:  0.2413793 0.637931 0.1206897 from 499552.3 771089.2
## Trim not respected:  0.25 0.6293103 0.1206897 from 510928.7 771089.2
## Trim not respected:  0.2586207 0.6206897 0.1206897 from 511224.5 771089.2
## Trim not respected:  0.2672414 0.612069 0.1206897 from 511791.8 771089.2
## Trim not respected:  0.2758621 0.6034483 0.1206897 from 511895.9 771089.2
## Trim not respected:  0.2844828 0.5948276 0.1206897 from 512476.7 771089.2
## Trim not respected:  0.2931034 0.5862069 0.1206897 from 521049.7 771089.2
## Trim not respected:  0.3017241 0.5775862 0.1206897 from 523480.9 771089.2
## Trim not respected:  0.3103448 0.5689655 0.1206897 from 528105.4 771089.2
## Trim not respected:  0.3189655 0.5603448 0.1206897 from 529852.4 771089.2
## Trim not respected:  0.3275862 0.5517241 0.1206897 from 530010.3 771089.2
## Trim not respected:  0.3362069 0.5431034 0.1206897 from 542167.5 771089.2
## Trim not respected:  0.3448276 0.5344828 0.1206897 from 544430 771089.2
## Trim not respected:  0.3534483 0.5258621 0.1206897 from 546553.9 771089.2
## Trim not respected:  0.362069 0.5172414 0.1206897 from 550118.6 771089.2
## Trim not respected:  0.3706897 0.5086207 0.1206897 from 557325.5 771089.2
## Trim not respected:  0.3793103 0.5 0.1206897 from 561437.7 771089.2
## Trim not respected:  0.387931 0.4913793 0.1206897 from 561724.3 771089.2
## Trim not respected:  0.3965517 0.4827586 0.1206897 from 562865.8 771089.2
## Trim not respected:  0.4051724 0.4741379 0.1206897 from 581770.8 771089.2
## Trim not respected:  0.4137931 0.4655172 0.1206897 from 582916.3 771089.2
## Trim not respected:  0.4224138 0.4568966 0.1206897 from 596791 771089.2
## Trim not respected:  0.4310345 0.4482759 0.1206897 from 600365 771089.2
## Trim not respected:  0.4396552 0.4396552 0.1206897 from 615605.6 771089.2
## Trim not respected:  0.4482759 0.4310345 0.1206897 from 620337.8 771089.2
## Trim not respected:  0.4568966 0.4224138 0.1206897 from 621904.5 771089.2
## Trim not respected:  0.4655172 0.4137931 0.1206897 from 622250.1 771089.2
## Trim not respected:  0.4741379 0.4051724 0.1206897 from 629474.1 771089.2
## Trim not respected:  0.4827586 0.3965517 0.1206897 from 634783.1 771089.2
## Trim not respected:  0.4913793 0.387931 0.1206897 from 639380.1 771089.2
## Trim not respected:  0.5 0.3793103 0.1206897 from 641543.7 771089.2
## Trim not respected:  0.5086207 0.3706897 0.1206897 from 643093.6 771089.2
## Trim not respected:  0.5172414 0.362069 0.1206897 from 648906.5 771089.2
## Trim not respected:  0.5258621 0.3534483 0.1206897 from 654757 771089.2
## Trim not respected:  0.5344828 0.3448276 0.1206897 from 666744 771089.2
## Trim not respected:  0.5431034 0.3362069 0.1206897 from 682393.1 771089.2
## Trim not respected:  0.5517241 0.3275862 0.1206897 from 685707.6 771089.2
## Trim not respected:  0.5603448 0.3189655 0.1206897 from 687379.9 771089.2
## Trim not respected:  0.5689655 0.3103448 0.1206897 from 687628.7 771089.2
## Trim not respected:  0.5775862 0.3017241 0.1206897 from 689025.3 771089.2
## Trim not respected:  0.5862069 0.2931034 0.1206897 from 694178.9 771089.2
## Trim not respected:  0.5948276 0.2844828 0.1206897 from 695528 771089.2
## Trim not respected:  0.6034483 0.2758621 0.1206897 from 695643.6 771089.2
## Trim not respected:  0.612069 0.2672414 0.1206897 from 699826.5 771089.2
## Trim not respected:  0.6206897 0.2586207 0.1206897 from 701399.4 771089.2
## Trim not respected:  0.6293103 0.25 0.1206897 from 701843.8 771089.2
## Trim not respected:  0.637931 0.2413793 0.1206897 from 702153.4 771089.2
## Trim not respected:  0.6465517 0.2327586 0.1206897 from 703858 771089.2
## Trim not respected:  0.6551724 0.2241379 0.1206897 from 705830 771089.2
## Trim not respected:  0.6637931 0.2155172 0.1206897 from 708095.7 771089.2
## Trim not respected:  0.6724138 0.2068966 0.1206897 from 713654.1 771089.2
## Trim not respected:  0.6810345 0.1982759 0.1206897 from 717484.9 771089.2
## Trim not respected:  0.6896552 0.1896552 0.1206897 from 718458 771089.2
## Trim not respected:  0.6982759 0.1810345 0.1206897 from 721993.2 771089.2
## Trim not respected:  0.7068966 0.1724138 0.1206897 from 722997.2 771089.2
## Trim not respected:  0.7155172 0.1637931 0.1206897 from 723361.2 771089.2
## Trim not respected:  0.7241379 0.1551724 0.1206897 from 725680.7 771089.2Second best: 725680.7 (conditionnal on th= 771089.2 and Delay= 2 )   SSR/AIC: 2715.108
## Second best: 529852.4 (conditionnal on th= 725680.7 and Delay= 2 )    SSR/AIC: 2732.59
## 
## Trim not respected:  0.1896552 0.6896552 0.1206897 from 422751.8 771089.2
## Trim not respected:  0.1982759 0.6810345 0.1206897 from 438446.4 771089.2
## Trim not respected:  0.2155172 0.6637931 0.1206897 from 470793.3 771089.2
## Trim not respected:  0.2241379 0.6551724 0.1206897 from 492453.3 771089.2
## Trim not respected:  0.2327586 0.6465517 0.1206897 from 496086.7 771089.2
## Trim not respected:  0.2413793 0.637931 0.1206897 from 499552.3 771089.2
## Trim not respected:  0.25 0.6293103 0.1206897 from 510928.7 771089.2
## Trim not respected:  0.2586207 0.6206897 0.1206897 from 511224.5 771089.2
## Trim not respected:  0.2672414 0.612069 0.1206897 from 511791.8 771089.2
## Trim not respected:  0.2758621 0.6034483 0.1206897 from 511895.9 771089.2
## Trim not respected:  0.2844828 0.5948276 0.1206897 from 512476.7 771089.2
## Trim not respected:  0.2931034 0.5862069 0.1206897 from 521049.7 771089.2
## Trim not respected:  0.3017241 0.5775862 0.1206897 from 523480.9 771089.2
## Trim not respected:  0.3103448 0.5689655 0.1206897 from 528105.4 771089.2
## Trim not respected:  0.3189655 0.5603448 0.1206897 from 529852.4 771089.2
## Trim not respected:  0.3275862 0.5517241 0.1206897 from 530010.3 771089.2
## Trim not respected:  0.3362069 0.5431034 0.1206897 from 542167.5 771089.2
## Trim not respected:  0.3448276 0.5344828 0.1206897 from 544430 771089.2
## Trim not respected:  0.3534483 0.5258621 0.1206897 from 546553.9 771089.2
## Trim not respected:  0.362069 0.5172414 0.1206897 from 550118.6 771089.2
## Trim not respected:  0.3706897 0.5086207 0.1206897 from 557325.5 771089.2
## Trim not respected:  0.3793103 0.5 0.1206897 from 561437.7 771089.2
## Trim not respected:  0.387931 0.4913793 0.1206897 from 561724.3 771089.2
## Trim not respected:  0.3965517 0.4827586 0.1206897 from 562865.8 771089.2
## Trim not respected:  0.4051724 0.4741379 0.1206897 from 581770.8 771089.2
## Trim not respected:  0.4137931 0.4655172 0.1206897 from 582916.3 771089.2
## Trim not respected:  0.4224138 0.4568966 0.1206897 from 596791 771089.2
## Trim not respected:  0.4310345 0.4482759 0.1206897 from 600365 771089.2
## Trim not respected:  0.4396552 0.4396552 0.1206897 from 615605.6 771089.2
## Trim not respected:  0.4482759 0.4310345 0.1206897 from 620337.8 771089.2
## Trim not respected:  0.4568966 0.4224138 0.1206897 from 621904.5 771089.2
## Trim not respected:  0.4655172 0.4137931 0.1206897 from 622250.1 771089.2
## Trim not respected:  0.4741379 0.4051724 0.1206897 from 629474.1 771089.2
## Trim not respected:  0.4827586 0.3965517 0.1206897 from 634783.1 771089.2
## Trim not respected:  0.4913793 0.387931 0.1206897 from 639380.1 771089.2
## Trim not respected:  0.5 0.3793103 0.1206897 from 641543.7 771089.2
## Trim not respected:  0.5086207 0.3706897 0.1206897 from 643093.6 771089.2
## Trim not respected:  0.5172414 0.362069 0.1206897 from 648906.5 771089.2
## Trim not respected:  0.5258621 0.3534483 0.1206897 from 654757 771089.2
## Trim not respected:  0.5344828 0.3448276 0.1206897 from 666744 771089.2
## Trim not respected:  0.5431034 0.3362069 0.1206897 from 682393.1 771089.2
## Trim not respected:  0.5517241 0.3275862 0.1206897 from 685707.6 771089.2
## Trim not respected:  0.5603448 0.3189655 0.1206897 from 687379.9 771089.2
## Trim not respected:  0.5689655 0.3103448 0.1206897 from 687628.7 771089.2
## Trim not respected:  0.5775862 0.3017241 0.1206897 from 689025.3 771089.2
## Trim not respected:  0.5862069 0.2931034 0.1206897 from 694178.9 771089.2
## Trim not respected:  0.5948276 0.2844828 0.1206897 from 695528 771089.2
## Trim not respected:  0.6034483 0.2758621 0.1206897 from 695643.6 771089.2
## Trim not respected:  0.612069 0.2672414 0.1206897 from 699826.5 771089.2
## Trim not respected:  0.6206897 0.2586207 0.1206897 from 701399.4 771089.2
## Trim not respected:  0.6293103 0.25 0.1206897 from 701843.8 771089.2
## Trim not respected:  0.637931 0.2413793 0.1206897 from 702153.4 771089.2
## Trim not respected:  0.6465517 0.2327586 0.1206897 from 703858 771089.2
## Trim not respected:  0.6551724 0.2241379 0.1206897 from 705830 771089.2
## Trim not respected:  0.6637931 0.2155172 0.1206897 from 708095.7 771089.2
## Trim not respected:  0.6724138 0.2068966 0.1206897 from 713654.1 771089.2
## Trim not respected:  0.6810345 0.1982759 0.1206897 from 717484.9 771089.2
## Trim not respected:  0.6896552 0.1896552 0.1206897 from 718458 771089.2
## Trim not respected:  0.6982759 0.1810345 0.1206897 from 721993.2 771089.2
## Trim not respected:  0.7068966 0.1724138 0.1206897 from 722997.2 771089.2
## Trim not respected:  0.7155172 0.1637931 0.1206897 from 723361.2 771089.2
## Trim not respected:  0.7241379 0.1551724 0.1206897 from 725680.7 771089.2Second best: 725680.7 (conditionnal on th= 771089.2 and Delay= 2 )   SSR/AIC: 2715.69
## Second best: 521049.7 (conditionnal on th= 725680.7 and Delay= 2 )    SSR/AIC: 2730.675
## 
## Trim not respected:  0.1896552 0.6896552 0.1206897 from 422751.8 771089.2
## Trim not respected:  0.1982759 0.6810345 0.1206897 from 438446.4 771089.2
## Trim not respected:  0.2155172 0.6637931 0.1206897 from 470793.3 771089.2
## Trim not respected:  0.2241379 0.6551724 0.1206897 from 492453.3 771089.2
## Trim not respected:  0.2327586 0.6465517 0.1206897 from 496086.7 771089.2
## Trim not respected:  0.2413793 0.637931 0.1206897 from 499552.3 771089.2
## Trim not respected:  0.25 0.6293103 0.1206897 from 510928.7 771089.2
## Trim not respected:  0.2586207 0.6206897 0.1206897 from 511224.5 771089.2
## Trim not respected:  0.2672414 0.612069 0.1206897 from 511791.8 771089.2
## Trim not respected:  0.2758621 0.6034483 0.1206897 from 511895.9 771089.2
## Trim not respected:  0.2844828 0.5948276 0.1206897 from 512476.7 771089.2
## Trim not respected:  0.2931034 0.5862069 0.1206897 from 521049.7 771089.2
## Trim not respected:  0.3017241 0.5775862 0.1206897 from 523480.9 771089.2
## Trim not respected:  0.3103448 0.5689655 0.1206897 from 528105.4 771089.2
## Trim not respected:  0.3189655 0.5603448 0.1206897 from 529852.4 771089.2
## Trim not respected:  0.3275862 0.5517241 0.1206897 from 530010.3 771089.2
## Trim not respected:  0.3362069 0.5431034 0.1206897 from 542167.5 771089.2
## Trim not respected:  0.3448276 0.5344828 0.1206897 from 544430 771089.2
## Trim not respected:  0.3534483 0.5258621 0.1206897 from 546553.9 771089.2
## Trim not respected:  0.362069 0.5172414 0.1206897 from 550118.6 771089.2
## Trim not respected:  0.3706897 0.5086207 0.1206897 from 557325.5 771089.2
## Trim not respected:  0.3793103 0.5 0.1206897 from 561437.7 771089.2
## Trim not respected:  0.387931 0.4913793 0.1206897 from 561724.3 771089.2
## Trim not respected:  0.3965517 0.4827586 0.1206897 from 562865.8 771089.2
## Trim not respected:  0.4051724 0.4741379 0.1206897 from 581770.8 771089.2
## Trim not respected:  0.4137931 0.4655172 0.1206897 from 582916.3 771089.2
## Trim not respected:  0.4224138 0.4568966 0.1206897 from 596791 771089.2
## Trim not respected:  0.4310345 0.4482759 0.1206897 from 600365 771089.2
## Trim not respected:  0.4396552 0.4396552 0.1206897 from 615605.6 771089.2
## Trim not respected:  0.4482759 0.4310345 0.1206897 from 620337.8 771089.2
## Trim not respected:  0.4568966 0.4224138 0.1206897 from 621904.5 771089.2
## Trim not respected:  0.4655172 0.4137931 0.1206897 from 622250.1 771089.2
## Trim not respected:  0.4741379 0.4051724 0.1206897 from 629474.1 771089.2
## Trim not respected:  0.4827586 0.3965517 0.1206897 from 634783.1 771089.2
## Trim not respected:  0.4913793 0.387931 0.1206897 from 639380.1 771089.2
## Trim not respected:  0.5 0.3793103 0.1206897 from 641543.7 771089.2
## Trim not respected:  0.5086207 0.3706897 0.1206897 from 643093.6 771089.2
## Trim not respected:  0.5172414 0.362069 0.1206897 from 648906.5 771089.2
## Trim not respected:  0.5258621 0.3534483 0.1206897 from 654757 771089.2
## Trim not respected:  0.5344828 0.3448276 0.1206897 from 666744 771089.2
## Trim not respected:  0.5431034 0.3362069 0.1206897 from 682393.1 771089.2
## Trim not respected:  0.5517241 0.3275862 0.1206897 from 685707.6 771089.2
## Trim not respected:  0.5603448 0.3189655 0.1206897 from 687379.9 771089.2
## Trim not respected:  0.5689655 0.3103448 0.1206897 from 687628.7 771089.2
## Trim not respected:  0.5775862 0.3017241 0.1206897 from 689025.3 771089.2
## Trim not respected:  0.5862069 0.2931034 0.1206897 from 694178.9 771089.2
## Trim not respected:  0.5948276 0.2844828 0.1206897 from 695528 771089.2
## Trim not respected:  0.6034483 0.2758621 0.1206897 from 695643.6 771089.2
## Trim not respected:  0.612069 0.2672414 0.1206897 from 699826.5 771089.2
## Trim not respected:  0.6206897 0.2586207 0.1206897 from 701399.4 771089.2
## Trim not respected:  0.6293103 0.25 0.1206897 from 701843.8 771089.2
## Trim not respected:  0.637931 0.2413793 0.1206897 from 702153.4 771089.2
## Trim not respected:  0.6465517 0.2327586 0.1206897 from 703858 771089.2
## Trim not respected:  0.6551724 0.2241379 0.1206897 from 705830 771089.2
## Trim not respected:  0.6637931 0.2155172 0.1206897 from 708095.7 771089.2
## Trim not respected:  0.6724138 0.2068966 0.1206897 from 713654.1 771089.2
## Trim not respected:  0.6810345 0.1982759 0.1206897 from 717484.9 771089.2
## Trim not respected:  0.6896552 0.1896552 0.1206897 from 718458 771089.2
## Trim not respected:  0.6982759 0.1810345 0.1206897 from 721993.2 771089.2
## Trim not respected:  0.7068966 0.1724138 0.1206897 from 722997.2 771089.2
## Trim not respected:  0.7155172 0.1637931 0.1206897 from 723361.2 771089.2
## Trim not respected:  0.7241379 0.1551724 0.1206897 from 725680.7 771089.2Second best: 725680.7 (conditionnal on th= 771089.2 and Delay= 2 )   SSR/AIC: 2717.616
## Second best: 521049.7 (conditionnal on th= 725680.7 and Delay= 2 )    SSR/AIC: 2731.29

Setar3 <-
  selectSETAR(
    sactnetcrc_train,
    m = 3,
    thDelay = seq(0, 2, by = 1),
    nthresh = 1,
    d = 1,
    criterion = "AIC",
    type = "level",
    plot = T,
    trace = T
  )
## Using maximum autoregressive order for low regime: mL = 3 
## Using maximum autoregressive order for high regime: mH = 3 
## Searching on 83 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on  2241  combinations of thresholds ( 83 ), thDelay ( 3 ), mL ( 3 ) and MM ( 3 ) 
## 
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
##  1 T: Trim not respected:  0.8571429 0.1428571 from th: 778351.5
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5

Setar4 <-
  selectSETAR(
    sactnetcrc_train,
    m = 3,
    thDelay = seq(0, 2, by = 1),
    nthresh = 1,
    d = 2,
    criterion = "AIC",
    type = "level",
    plot = T,
    trace = T
  )
## Using maximum autoregressive order for low regime: mL = 3 
## Using maximum autoregressive order for high regime: mH = 3 
## Searching on 80 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on  2160  combinations of thresholds ( 80 ), thDelay ( 3 ), mL ( 3 ) and MM ( 3 ) 
## 
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
##  1 T: Trim not respected:  0.862069 0.137931 from th: 771089.2
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2

Setar1$allTh%>%
  as.data.frame()%>%
  arrange(AIC,thDelay,mL,mH)%>%
  head(5)
##   thDelay mL mH       th      AIC
## 1       2  3  1 778351.5 2664.942
## 2       2  3  1 730317.6 2664.952
## 3       2  3  1 733818.8 2665.123
## 4       2  3  1 748435.4 2665.137
## 5       2  3  1 748409.3 2665.149
Setar2$allTh%>%
  as.data.frame()%>%
  arrange(AIC,thDelay,mL,mH)%>%
  head(5)
##   thDelay mL mH       th      AIC
## 1       2  2  1 771089.2 2714.372
## 2       2  2  3 771089.2 2715.404
## 3       2  2  2 771089.2 2715.629
## 4       2  3  1 771089.2 2716.262
## 5       2  3  3 771089.2 2717.291
Setar3$allTh%>%
  as.data.frame()%>%
  arrange(AIC,thDelay,mL,mH)%>%
  head(5)
##   thDelay mL mH       th      AIC
## 1       2  3  1 778351.5 2664.942
## 2       2  3  1 730317.6 2664.952
## 3       2  3  1 733818.8 2665.123
## 4       2  3  1 748435.4 2665.137
## 5       2  3  1 748409.3 2665.149
Setar4$allTh%>%
  as.data.frame()%>%
  arrange(AIC,thDelay,mL,mH)%>%
  head(5)
##   thDelay mL mH       th      AIC
## 1       2  2  1 771089.2 2714.372
## 2       2  2  3 771089.2 2715.404
## 3       2  2  2 771089.2 2715.629
## 4       0  2  1 765999.6 2715.832
## 5       2  3  1 771089.2 2716.262
modeloas1 <-
  setar(
    sactnetcrc_train,
    m = 3,
    mL = 3,
    mH = 1,
    d=1,
    nthresh = 1,
    thDelay = 2,
    type = "level"
  )
## 
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
## Warning: 
## With the threshold you gave (778351.4845) there is a regime with less than trim=15% observations (86.55%, 13.45%, )
## 
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 778351.5
## Raiz Unitaria
summary(modeloas1) #residuals variance = 0.005525,  AIC = -632, MAPE = 0.4352%
## 
## Non linear autoregressive model
## 
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
##       const.L        phiL.1        phiL.2        phiL.3 
## 15609.1159299     0.8269447    -0.2758435     0.4374676 
## 
## High regime:
##        const.H         phiH.1 
## 115310.2619648      0.8862372 
## 
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (0)X(t-1)+ (1)X(t-2)
## -Value: 778351
## Proportion of points in low regime: 86.55%    High regime: 13.45% 
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -154452.7  -35472.8   -2474.2   30816.0  172187.3 
## 
## Fit:
## residuals variance = 2734001726,  AIC = 2665, MAPE = 6.472%
## 
## Coefficient(s):
## 
##              Estimate    Std. Error  t value              Pr(>|t|)    
## const.L  15609.115930  23870.032713   0.6539             0.5144570    
## phiL.1       0.826945      0.107621   7.6838     0.000000000005423 ***
## phiL.2      -0.275843      0.142709  -1.9329             0.0556856 .  
## phiL.3       0.437468      0.113051   3.8696             0.0001805 ***
## const.H 115310.261965  68150.181595   1.6920             0.0933301 .  
## phiH.1       0.886237      0.075633  11.7175 < 0.00000000000000022 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Threshold
## Variable: Z(t) = + (0) X(t) + (0) X(t-1)+ (1) X(t-2)
## 
## Value: 778351
# plot(modeloas1)
checkresiduals(ts(modeloas1$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

modeloas2 <-
  setar(
    sactnetcrc_train,
    m = 3,
    mL = 2,
    mH = 3,
    d=2,
    nthresh = 1,
    thDelay = 2,
    type = "level"
  )
## 
##  1 T: Trim not respected:  0.8534483 0.1465517 from th: 766323.9
##  1 T: Trim not respected:  0.862069 0.137931 from th: 768336.6
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
## Warning: 
## With the threshold you gave (771089.1847) there is a regime with less than trim=15% observations (87.93%, 12.07%, )
## 
##  1 T: Trim not respected:  0.8793103 0.1206897 from th: 771089.2
## Warning: Possible unit root in the high regime. Roots are: 0.9412 1.4517 1.4517
## Raiz Unitaria
summary(modeloas2) # residuals variance = 0.005857,  AIC = -635, MAPE = 0.4584%
## 
## Non linear autoregressive model
## 
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
##       const.L        phiL.1        phiL.2 
## 34071.1080807     0.4627114     0.5021382 
## 
## High regime:
##        const.H         phiH.1         phiH.2         phiH.3 
## -77861.2418850      1.0319769     -0.4420645      0.5041543 
## 
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (0)X(t-1)+ (1)X(t-2)
## -Value: 771089
## Proportion of points in low regime: 87.93%    High regime: 12.07% 
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -142122.0  -39133.9   -6568.6   40446.3  200204.4 
## 
## Fit:
## residuals variance = 4067367517,  AIC = 2715, MAPE = 7.964%
## 
## Coefficient(s):
## 
##              Estimate    Std. Error  t value     Pr(>|t|)    
## const.L  34071.108081  28347.172029   1.2019       0.2319    
## phiL.1       0.462711      0.091475   5.0583 0.0000016165 ***
## phiL.2       0.502138      0.092180   5.4474 0.0000002944 ***
## const.H -77861.241885 197462.308180  -0.3943       0.6941    
## phiH.1       1.031977      0.220265   4.6852 0.0000077304 ***
## phiH.2      -0.442065      0.273944  -1.6137       0.1093    
## phiH.3       0.504154      0.346506   1.4550       0.1484    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Threshold
## Variable: Z(t) = + (0) X(t) + (0) X(t-1)+ (1) X(t-2)
## 
## Value: 771089
# plot(modeloas2)
checkresiduals(ts(modeloas2$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

modeloas3 <-
  setar(
    sactnetcrc_train,
    m = 3,
    mL = 3,
    mH = 2,
    d=1,
    nthresh = 1,
    thDelay = 0,
    type = "level"
  )
## Warning: Possible unit root in the low regime. Roots are: 0.9808 1.6062 1.6062
## Raiz Unitaria
summary(modeloas3) # residuals variance = 0.006319,  AIC = -621, MAPE = 0.4621%
## 
## Non linear autoregressive model
## 
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
##       const.L        phiL.1        phiL.2        phiL.3 
## -6554.1376992     0.7843393    -0.1477890     0.3951947 
## 
## High regime:
##        const.H         phiH.1         phiH.2 
## -21230.4436532      1.1909899     -0.1927039 
## 
## Threshold:
## -Variable: Z(t) = + (1) X(t)+ (0)X(t-1)+ (0)X(t-2)
## -Value: 737895
## Proportion of points in low regime: 73.11%    High regime: 26.89% 
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -212069.7  -26359.4   -2570.8   30755.0  159863.0 
## 
## Fit:
## residuals variance = 2741163981,  AIC = 2667, MAPE = 6.291%
## 
## Coefficient(s):
## 
##             Estimate   Std. Error  t value       Pr(>|t|)    
## const.L  -6554.13770  27001.52675  -0.2427       0.808645    
## phiL.1       0.78434      0.11867   6.6093 0.000000001262 ***
## phiL.2      -0.14779      0.15100  -0.9788       0.329752    
## phiL.3       0.39519      0.12353   3.1993       0.001781 ** 
## const.H -21230.44365  67109.96245  -0.3164       0.752308    
## phiH.1       1.19099      0.18970   6.2782 0.000000006271 ***
## phiH.2      -0.19270      0.17324  -1.1123       0.268311    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Threshold
## Variable: Z(t) = + (1) X(t) + (0) X(t-1)+ (0) X(t-2)
## 
## Value: 737895
# plot(modeloas3)
checkresiduals(ts(modeloas3$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

modeloas4 <-
  setar(
    sactnetcrc_train,
    m = 3,
    mL = 1,
    mH = 2,
    d=2,
    nthresh = 1,
    thDelay = 0,
    type = "level"
  )
summary(modeloas4) # residuals variance = 0.006319,  AIC = -621, MAPE = 0.4621%
## 
## Non linear autoregressive model
## 
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
##       const.L        phiL.1 
## 497047.101814     -0.245272 
## 
## High regime:
##       const.H        phiH.1        phiH.2 
## 55962.5321562     0.6042356     0.3360949 
## 
## Threshold:
## -Variable: Z(t) = + (1) X(t)+ (0)X(t-1)+ (0)X(t-2)
## -Value: 438446
## Proportion of points in low regime: 17.24%    High regime: 82.76% 
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -155836.0  -37647.5   -5849.8   34675.6  193708.0 
## 
## Fit:
## residuals variance = 4576252319,  AIC = 2726, MAPE = 7.827%
## 
## Coefficient(s):
## 
##              Estimate    Std. Error  t value        Pr(>|t|)    
## const.L 497047.101814 229225.967759   2.1684       0.0321548 *  
## phiL.1      -0.245272      0.588794  -0.4166       0.6777585    
## const.H  55962.532156  36809.220638   1.5203       0.1311234    
## phiH.1       0.604236      0.090559   6.6723 0.0000000008819 ***
## phiH.2       0.336095      0.091306   3.6810       0.0003529 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Threshold
## Variable: Z(t) = + (1) X(t) + (0) X(t-1)+ (0) X(t-2)
## 
## Value: 438446
# plot(modeloas4)
checkresiduals(ts(modeloas4$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

cbind(
Modelo=c("1.SETAR m = 3,mL = 3, mH = 1, d=1,nthresh = 1,thDelay = 2",
         "2.SETAR m = 3,mL = 2, mH = 3, d=2,nthresh = 1,thDelay = 2",
         "3.SETAR m = 3,mL = 3, mH = 2, d=1,nthresh = 1,thDelay = 0",
         "4.SETAR m = 3,mL = 1, mH = 2, d=2,nthresh = 1,thDelay = 0"),
AIC=c(
  AIC(modeloas1),
  AIC(modeloas2),
  AIC(modeloas3),
  AIC(modeloas4)
  
),
BIC=c(
  BIC(modeloas1),
  BIC(modeloas2),
  BIC(modeloas3),
  BIC(modeloas4)
  
)
)%>%
  knitr::kable()
Modelo AIC BIC
1.SETAR m = 3,mL = 3, mH = 1, d=1,nthresh = 1,thDelay = 2 2664.94192921197 2684.57007652511
2.SETAR m = 3,mL = 2, mH = 3, d=2,nthresh = 1,thDelay = 2 2715.40394269942 2737.83611105729
3.SETAR m = 3,mL = 3, mH = 2, d=1,nthresh = 1,thDelay = 0 2667.26111427235 2689.69328263021
4.SETAR m = 3,mL = 1, mH = 2, d=2,nthresh = 1,thDelay = 0 2725.78583996026 2742.60996622866
pronsetar1<- predict(modeloas1, n.ahead = h.param)
pronsetar2<- predict(modeloas2, n.ahead = h.param)
pronsetar3<- predict(modeloas3, n.ahead = h.param)
pronsetar4<- predict(modeloas4, n.ahead = h.param)

fit1<-ts(modeloas1$fitted.values,start =inicio_train,frequency = 12)
fit2<-ts(modeloas2$fitted.values,start =inicio_train,frequency = 12)
fit3<-ts(modeloas3$fitted.values,start =inicio_train,frequency = 12)
fit4<-ts(modeloas4$fitted.values,start =inicio_train,frequency = 12)
MetricasSETARCRC<-data.frame(
  Modelo=rep(c(
         "1.SETAR m = 3,mL = 3, mH = 1, d=1,nthresh = 1,thDelay = 2",
         "2.SETAR m = 3,mL = 2, mH = 3, d=2,nthresh = 1,thDelay = 2",
         "3.SETAR m = 3,mL = 3, mH = 2, d=1,nthresh = 1,thDelay = 0",
         "4.SETAR m = 3,mL = 1, mH = 2, d=2,nthresh = 1,thDelay = 0"),2),
DataSet= c(rep("Entrenamiento",4),rep("Prueba",4)),

rbind(
  getPerformance(fit1,sactnetcrc_train),
  getPerformance(fit2,sactnetcrc_train),
  getPerformance(fit3,sactnetcrc_train),
  getPerformance(fit4,sactnetcrc_train),

  getPerformance(pronsetar1,sactnetcrc_test),
  getPerformance(pronsetar2,sactnetcrc_test),
  getPerformance(pronsetar3,sactnetcrc_test),
  getPerformance(pronsetar4,sactnetcrc_test)
  ))%>%
  arrange(DataSet,RMSE)

MetricasSETARCRC%>%
  knitr::kable(caption="Metricas de Rendimiento Modelos SETAR")
Metricas de Rendimiento Modelos SETAR
Modelo DataSet MAE RSS MSE RMSE
1.SETAR m = 3,mL = 3, mH = 1, d=1,nthresh = 1,thDelay = 2 Entrenamiento 42575.25 411939644750 3376554465 58108.13
3.SETAR m = 3,mL = 3, mH = 2, d=1,nthresh = 1,thDelay = 0 Entrenamiento 43976.89 479773603905 3932570524 62710.21
2.SETAR m = 3,mL = 2, mH = 3, d=2,nthresh = 1,thDelay = 2 Entrenamiento 51081.82 607319064278 4978025117 70555.12
4.SETAR m = 3,mL = 1, mH = 2, d=2,nthresh = 1,thDelay = 0 Entrenamiento 53854.21 669984437781 5491675720 74105.84
1.SETAR m = 3,mL = 3, mH = 1, d=1,nthresh = 1,thDelay = 2 Prueba 58177.95 27099683662 5419936732 73620.22
4.SETAR m = 3,mL = 1, mH = 2, d=2,nthresh = 1,thDelay = 0 Prueba 70611.86 35773255567 7154651113 84585.17
2.SETAR m = 3,mL = 2, mH = 3, d=2,nthresh = 1,thDelay = 2 Prueba 109019.00 80955890516 16191178103 127244.56
3.SETAR m = 3,mL = 3, mH = 2, d=1,nthresh = 1,thDelay = 0 Prueba 118153.55 111351647786 22270329557 149232.47
autoplot(sactnetcrc_train)+
  autolayer(fit1)+
  autolayer(fit2)+
  autolayer(fit3)+
  autolayer(fit4)+
  theme_bw()

autoplot(sactnetcrc_test)+
  autolayer(pronsetar1)+
  autolayer(pronsetar2)+
  autolayer(pronsetar3)+
  autolayer(pronsetar4)+
  theme_bw()+
  scale_y_continuous(limits = c(500000,1400000))

Metricas Generales
rbind(MetricasTARCRC,
MetricasSETARCRC)%>%
  arrange(DataSet,RMSE)%>%
  knitr::kable(caption="Metricas de Rendimiento Modelos No Lineales Colones")
Metricas de Rendimiento Modelos No Lineales Colones
Modelo DataSet MAE RSS MSE RMSE
3.TAR p1=3,p2=3,d=1 Entrenamiento 39772.97 328942351055 2696248779 51925.42
1.TAR p1=3,p2=2,d=1 Entrenamiento 40308.12 338913405336 2777978732 52706.53
2.TAR p1=3,p2=1,d=2 Entrenamiento 40514.30 351547834112 2881539624 53679.97
1.SETAR m = 3,mL = 3, mH = 1, d=1,nthresh = 1,thDelay = 2 Entrenamiento 42575.25 411939644750 3376554465 58108.13
3.SETAR m = 3,mL = 3, mH = 2, d=1,nthresh = 1,thDelay = 0 Entrenamiento 43976.89 479773603905 3932570524 62710.21
2.SETAR m = 3,mL = 2, mH = 3, d=2,nthresh = 1,thDelay = 2 Entrenamiento 51081.82 607319064278 4978025117 70555.12
4.SETAR m = 3,mL = 1, mH = 2, d=2,nthresh = 1,thDelay = 0 Entrenamiento 53854.21 669984437781 5491675720 74105.84
1.SETAR m = 3,mL = 3, mH = 1, d=1,nthresh = 1,thDelay = 2 Prueba 58177.95 27099683662 5419936732 73620.22
3.TAR p1=3,p2=3,d=1 Prueba 59009.97 30898688005 6179737601 78611.31
4.SETAR m = 3,mL = 1, mH = 2, d=2,nthresh = 1,thDelay = 0 Prueba 70611.86 35773255567 7154651113 84585.17
1.TAR p1=3,p2=2,d=1 Prueba 84725.95 62293400375 12458680075 111618.46
2.TAR p1=3,p2=1,d=2 Prueba 87059.11 64393263703 12878652741 113484.15
2.SETAR m = 3,mL = 2, mH = 3, d=2,nthresh = 1,thDelay = 2 Prueba 109019.00 80955890516 16191178103 127244.56
3.SETAR m = 3,mL = 3, mH = 2, d=1,nthresh = 1,thDelay = 0 Prueba 118153.55 111351647786 22270329557 149232.47
autoplot(sactnetcrc_test)+
  autolayer(prontar3)+
  autolayer(pronsetar1)+
  autolayer(prontar1)+
  autolayer(pronsetar4)+
  theme_bw()+
  scale_y_continuous(limits = c(500000,1400000))

Serie en Dolares

TAR
# m orden
pm <- 1:4

mod.list.tar<-list()
AIC.best.list<-list()

AICM = NULL
model.best <- list(d=0, p1=0, p2=0)
AIC.best = 10000

for(l in pm){
  for(j in pm){
    for(i in pm){
      set.seed(777)
      model.tar.s = tar(sactnetusd_train,p1=j,p2=i,d=l)
      mod.list.tar[[paste(j,i,l,sep="-")]]<-model.tar.s$AIC
      print(paste("Modelo:",j,i,l,sep="-"))    
      
      if (model.tar.s$AIC < AIC.best) {
            AIC.best = model.tar.s$AIC
            AIC.best.list[[paste(j,i,l,sep="-")]]<-AIC.best
            #print("Modelo:",j,i,l,"AIC",AIC.best)
            model.best$d = l
            model.best$p1 = model.tar.s$p1
            model.best$p2 = model.tar.s$p2 
            print(paste(model.tar.s$p1,model.tar.s$p2,l,sep="-")) }
    }
  }
}
## [1] "Modelo:-1-1-1"
## [1] "0-1-1"
## [1] "Modelo:-1-2-1"
## [1] "0-1-1"
## [1] "Modelo:-1-3-1"
## [1] "0-1-1"
## [1] "Modelo:-1-4-1"
## [1] "0-1-1"
## [1] "Modelo:-2-1-1"
## [1] "Modelo:-2-2-1"
## [1] "Modelo:-2-3-1"
## [1] "Modelo:-2-4-1"
## [1] "2-1-1"
## [1] "Modelo:-3-1-1"
## [1] "Modelo:-3-2-1"
## [1] "Modelo:-3-3-1"
## [1] "Modelo:-3-4-1"
## [1] "3-1-1"
## [1] "Modelo:-4-1-1"
## [1] "Modelo:-4-2-1"
## [1] "Modelo:-4-3-1"
## [1] "Modelo:-4-4-1"
## [1] "Modelo:-1-1-2"
## [1] "Modelo:-1-2-2"
## [1] "Modelo:-1-3-2"
## [1] "Modelo:-1-4-2"
## [1] "Modelo:-2-1-2"
## [1] "Modelo:-2-2-2"
## [1] "Modelo:-2-3-2"
## [1] "Modelo:-2-4-2"
## [1] "Modelo:-3-1-2"
## [1] "Modelo:-3-2-2"
## [1] "Modelo:-3-3-2"
## [1] "Modelo:-3-4-2"
## [1] "Modelo:-4-1-2"
## [1] "Modelo:-4-2-2"
## [1] "Modelo:-4-3-2"
## [1] "Modelo:-4-4-2"
## [1] "Modelo:-1-1-3"
## [1] "Modelo:-1-2-3"
## [1] "Modelo:-1-3-3"
## [1] "Modelo:-1-4-3"
## [1] "Modelo:-2-1-3"
## [1] "Modelo:-2-2-3"
## [1] "Modelo:-2-3-3"
## [1] "Modelo:-2-4-3"
## [1] "Modelo:-3-1-3"
## [1] "Modelo:-3-2-3"
## [1] "Modelo:-3-3-3"
## [1] "Modelo:-3-4-3"
## [1] "Modelo:-4-1-3"
## [1] "Modelo:-4-2-3"
## [1] "Modelo:-4-3-3"
## [1] "Modelo:-4-4-3"
## [1] "Modelo:-1-1-4"
## [1] "Modelo:-1-2-4"
## [1] "Modelo:-1-3-4"
## [1] "Modelo:-1-4-4"
## [1] "Modelo:-2-1-4"
## [1] "Modelo:-2-2-4"
## [1] "Modelo:-2-3-4"
## [1] "Modelo:-2-4-4"
## [1] "Modelo:-3-1-4"
## [1] "Modelo:-3-2-4"
## [1] "Modelo:-3-3-4"
## [1] "Modelo:-3-4-4"
## [1] "Modelo:-4-1-4"
## [1] "Modelo:-4-2-4"
## [1] "Modelo:-4-3-4"
## [1] "Modelo:-4-4-4"
# AICTar<-bind_rows(mod.list.tar,.id = "Ordene-delay")%>%
#   arrange(`1`)
# 
# knitr::kable(head(AICTar,20))

AICTarBest<-bind_rows(AIC.best.list,.id = "Ordene-delay")%>%
  arrange(`1`)

knitr::kable(head(AICTarBest,20))
Ordene-delay 1
3-4-1 1349
2-4-1 1350
1-4-1 1360
1-3-1 1372
1-2-1 1382
1-1-1 1394
mod.tar1.usd<-TSA::tar(sactnetusd_train,p1=3,p2=4,d=1)  
mod.tar2.usd<-TSA::tar(sactnetusd_train,p1=1,p2=2,d=1)  
mod.tar3.usd<-TSA::tar(sactnetusd_train,p1=1,p2=3,d=1)  

mod.tar1.usd$thd
##          
## 622.0209
mod.tar2.usd$thd
##          
## 590.6428
mod.tar3.usd$thd
##          
## 590.6428
mod.tar1.usd$qr1$coefficients
## intercept-sactnetusd_train      lag1-sactnetusd_train 
##                181.4332347                  1.6151098 
##      lag2-sactnetusd_train      lag3-sactnetusd_train 
##                 -1.3676001                  0.4599062
mod.tar2.usd$qr1$coefficients
## intercept-sactnetusd_train 
##                   569.0528
mod.tar3.usd$qr1$coefficients
## intercept-sactnetusd_train 
##                   569.0528
mod.tar1.usd$qr2$coefficients
## intercept-sactnetusd_train      lag1-sactnetusd_train 
##                 41.7045508                  0.9679892
mod.tar2.usd$qr2$coefficients
## intercept-sactnetusd_train      lag1-sactnetusd_train 
##                 36.6212186                  0.9721871
mod.tar3.usd$qr2$coefficients
## intercept-sactnetusd_train      lag1-sactnetusd_train 
##                 32.8396744                  0.9753914
data.frame(
Modelo=c("1. TAR p1=3,p2=4,d=1",
         "2. TAR p1=1,p2=2,d=1",
         "3. TAR p1=1,p2=3,d=1"),
AIC=c(mod.tar1.usd$AIC,
mod.tar2.usd$AIC,
mod.tar3.usd$AIC))%>%
  arrange(AIC)%>%
  knitr::kable()
Modelo AIC
1. TAR p1=3,p2=4,d=1 1349
3. TAR p1=1,p2=3,d=1 1372
2. TAR p1=1,p2=2,d=1 1382
tsdiag(mod.tar1.usd)

tsdiag(mod.tar2.usd)

tsdiag(mod.tar3.usd)

checkresiduals(ts(mod.tar1.usd$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

checkresiduals(ts(mod.tar2.usd$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

checkresiduals(ts(mod.tar3.usd$residuals,start=inicio_train,frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

prontar1.usd<- ts(as.vector(predict(mod.tar1.usd,n.ahead=h.param,n.sim=1000)$fit),start=inicio_test,frequency = 12)
prontar2.usd<- ts(as.vector(predict(mod.tar2.usd,n.ahead=h.param,n.sim=1000)$fit),start=inicio_test,frequency = 12)
prontar3.usd<- ts(as.vector(predict(mod.tar3.usd,n.ahead=h.param,n.sim=1000)$fit),start=inicio_test,frequency = 12)

fit1.usd<-ts(as.vector(mod.tar1.usd$y)-as.vector(mod.tar1.usd$residuals),start =inicio_train,frequency = 12)
## Warning in as.vector(mod.tar1.usd$y) - as.vector(mod.tar1.usd$residuals): longer
## object length is not a multiple of shorter object length
fit2.usd<-ts(as.vector(mod.tar1.usd$y)-mod.tar2.usd$residuals,start =inicio_train,frequency = 12)
## Warning in as.vector(mod.tar1.usd$y) - mod.tar2.usd$residuals: longer object
## length is not a multiple of shorter object length
fit3.usd<-ts(as.vector(mod.tar1.usd$y)-mod.tar3.usd$residuals,start =inicio_train,frequency = 12)
## Warning in as.vector(mod.tar1.usd$y) - mod.tar3.usd$residuals: longer object
## length is not a multiple of shorter object length
MetricasTARUSD<-data.frame(
  Modelo=rep(c("1. TAR p1=3,p2=4,d=1",
         "2. TAR p1=1,p2=2,d=1",
         "3. TAR p1=1,p2=3,d=1"),2),
DataSet= c(rep("Entrenamiento",3),rep("Prueba",3)),

rbind(getPerformance(fit1.usd,sactnetusd_train),
getPerformance(fit2.usd,sactnetusd_train),
getPerformance(fit3.usd,sactnetusd_train),

getPerformance(prontar1.usd,sactnetusd_test),
getPerformance(prontar2.usd,sactnetusd_test),
getPerformance(prontar3.usd,sactnetusd_test)))%>%
  arrange(DataSet,RMSE)

MetricasTARUSD%>%
  knitr::kable(caption="Metricas de Rendimiento Modelos TAR USD")
Metricas de Rendimiento Modelos TAR USD
Modelo DataSet MAE RSS MSE RMSE
1. TAR p1=3,p2=4,d=1 Entrenamiento 55.96542 676293.4 5543.389 74.45394
3. TAR p1=1,p2=3,d=1 Entrenamiento 59.12611 711230.7 5829.760 76.35286
2. TAR p1=1,p2=2,d=1 Entrenamiento 59.42877 713711.6 5850.095 76.48592
3. TAR p1=1,p2=3,d=1 Prueba 121.80264 106912.6 21382.522 146.22764
2. TAR p1=1,p2=2,d=1 Prueba 125.33080 113962.3 22792.469 150.97175
1. TAR p1=3,p2=4,d=1 Prueba 131.18617 124281.3 24856.252 157.65866
autoplot(sactnetusd_train)+
  autolayer(fit1.usd)+
  autolayer(fit2.usd)+
  autolayer(fit3.usd)+
  theme_bw()

autoplot(sactnetusd_test)+
  autolayer(prontar1.usd)+
  autolayer(prontar2.usd)+
  autolayer(prontar3.usd)+
  theme_bw()

SETAR

Thus the threshold delay, the number of lags in each regime and the threshold value are computed.

Setar1.usd <-
  selectSETAR(
    sactnetusd_train, 
    include = c("const", "trend","none", "both"),
    m = 4,
    thDelay = seq(0, 3, by = 1),
    nthresh = 3,
    criterion = "AIC",
    type = "level",
    plot = T,
    trace = T
  )
## Using maximum autoregressive order for low regime: mL = 4 
## Using maximum autoregressive order for high regime: mH = 4 
## Searching on 82 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on  5248  combinations of thresholds ( 82 ), thDelay ( 4 ), mL ( 4 ) and MM ( 4 ) 
## 
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61

Setar2.usd <-
  selectSETAR(
    sactnetusd_train,
    m = 4,
    d=2,
    thDelay = seq(0, 3, by = 1),
    nthresh = 3,
    criterion = "AIC",
    type = "level",
    plot = T,
    trace = T
  )
## Using maximum autoregressive order for low regime: mL = 4 
## Using maximum autoregressive order for high regime: mH = 4 
## Searching on 78 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on  4992  combinations of thresholds ( 78 ), thDelay ( 4 ), mL ( 4 ) and MM ( 4 ) 
## 
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601

Setar3.usd <-
  selectSETAR(
    sactnetusd_train,
    m = 4,
    thDelay = seq(0, 3, by = 1),
    nthresh = 3,
    d = 1,
    criterion = "AIC",
    type = "level",
    plot = T,
    trace = T
  )
## Using maximum autoregressive order for low regime: mL = 4 
## Using maximum autoregressive order for high regime: mH = 4 
## Searching on 82 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on  5248  combinations of thresholds ( 82 ), thDelay ( 4 ), mL ( 4 ) and MM ( 4 ) 
## 
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1212.776
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61
##  1 T: Trim not respected:  0.8559322 0.1440678 from th: 1214.61
##  1 T: Trim not respected:  0.8644068 0.1355932 from th: 1214.61
##  1 T: Trim not respected:  0.8728814 0.1271186 from th: 1214.61

Setar4.usd <-
  selectSETAR(
    sactnetusd_train,
    m = 4,
    thDelay = seq(0, 3, by = 1),
    nthresh = 3,
    d = 2,
    criterion = "AIC",
    type = "level",
    plot = T,
    trace = T
  )
## Using maximum autoregressive order for low regime: mL = 4 
## Using maximum autoregressive order for high regime: mH = 4 
## Searching on 78 possible threshold values within regimes with sufficient ( 15% ) number of observations
## Searching on  4992  combinations of thresholds ( 78 ), thDelay ( 4 ), mL ( 4 ) and MM ( 4 ) 
## 
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1207.644
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1208.27
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8508772 0.1491228 from th: 1212.131
##  1 T: Trim not respected:  0.8684211 0.1315789 from th: 1212.131
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1212.381
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1212.381
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601
##  1 T: Trim not respected:  0.8596491 0.1403509 from th: 1213.601
##  1 T: Trim not respected:  0.877193 0.122807 from th: 1213.601
##  1 T: Trim not respected:  0.8947368 0.1052632 from th: 1213.601

Setar1.usd$allTh%>%
  as.data.frame()%>%
  arrange(AIC,thDelay,mL,mH)%>%
  head(5)
##   thDelay mL mH       th      AIC
## 1       1  1  1 1212.776 1055.814
## 2       1  1  1 1202.790 1056.177
## 3       1  2  1 1212.776 1056.251
## 4       1  2  1 1202.790 1056.635
## 5       1  1  1 1208.270 1056.776
Setar2.usd$allTh%>%
  as.data.frame()%>%
  arrange(AIC,thDelay,mL,mH)%>%
  head(5)
##   thDelay mL mH       th      AIC
## 1       0  1  4 1182.401 1119.110
## 2       0  1  3 1182.401 1119.187
## 3       0  1  4 1191.128 1119.788
## 4       0  1  4 1193.428 1119.823
## 5       0  1  3 1193.428 1119.826
Setar3.usd$allTh%>%
  as.data.frame()%>%
  arrange(AIC,thDelay,mL,mH)%>%
  head(5)
##   thDelay mL mH       th      AIC
## 1       1  1  1 1212.776 1055.814
## 2       1  1  1 1202.790 1056.177
## 3       1  2  1 1212.776 1056.251
## 4       1  2  1 1202.790 1056.635
## 5       1  1  1 1208.270 1056.776
Setar4.usd$allTh%>%
  as.data.frame()%>%
  arrange(AIC,thDelay,mL,mH)%>%
  head(5)
##   thDelay mL mH       th      AIC
## 1       0  1  4 1182.401 1119.110
## 2       0  1  3 1182.401 1119.187
## 3       0  1  4 1191.128 1119.788
## 4       0  1  4 1193.428 1119.823
## 5       0  1  3 1193.428 1119.826
modeloas1.usd <-
  setar(
    sactnetusd_train,
    mL = 1,
    mH = 1,
    d=1,
    nthresh = 1,
    thDelay = 2,
    type = "level"
  )
## 
##  1 T: Trim not respected:  0.8655462 0.1344538 from th: 1214.61
## Raiz Unitaria
summary(modeloas1.usd) #residuals variance = 0.005525,  AIC = -632, MAPE = 0.4352%
## 
## Non linear autoregressive model
## 
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
##     const.L      phiL.1 
## 149.9768322   0.7745225 
## 
## High regime:
##    const.H     phiH.1 
## 97.1339609  0.9222561 
## 
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (0)X(t-1)+ (1)X(t-2)
## -Value: 790.7
## Proportion of points in low regime: 24.37%    High regime: 75.63% 
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -189.9204  -49.4084   -6.5524   42.2501  252.5032 
## 
## Fit:
## residuals variance = 5509,  AIC = 1061, MAPE = 5.783%
## 
## Coefficient(s):
## 
##           Estimate  Std. Error  t value              Pr(>|t|)    
## const.L 149.976832   88.379070   1.6970               0.09234 .  
## phiL.1    0.774522    0.132371   5.8512         0.00000004466 ***
## const.H  97.133961   55.118809   1.7623               0.08061 .  
## phiH.1    0.922256    0.048606  18.9740 < 0.00000000000000022 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Threshold
## Variable: Z(t) = + (0) X(t) + (0) X(t-1)+ (1) X(t-2)
## 
## Value: 790.7
# plot(modeloas1)
checkresiduals(ts(modeloas1.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

modeloas2.usd <-
  setar(
    sactnetusd_train,
    mL = 1,
    mH = 3,
    d=2,
    nthresh = 1,
    thDelay = 1,
    type = "level"
  )
## 
##  1 T: Trim not respected:  0.862069 0.137931 from th: 1213.601
## Warning: Possible unit root in the high regime. Roots are: 0.8736 1.2514 1.2514
## Raiz Unitaria
summary(modeloas2.usd) # residuals variance = 0.005857,  AIC = -635, MAPE = 0.4584%
## 
## Non linear autoregressive model
## 
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
##    const.L     phiL.1 
## 94.1527345  0.9211025 
## 
## High regime:
##      const.H       phiH.1       phiH.2       phiH.3 
## -318.4527806    0.7902831   -0.2328454    0.7309027 
## 
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (1)X(t-1)+ (0)X(t-2)
## -Value: 1208
## Proportion of points in low regime: 82.76%    High regime: 17.24% 
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -290.709  -60.694  -11.602   59.844  282.297 
## 
## Fit:
## residuals variance = 9698,  AIC = 1134, MAPE = 8.4%
## 
## Coefficient(s):
## 
##            Estimate  Std. Error  t value              Pr(>|t|)    
## const.L   94.152734   44.346631   2.1231             0.0358703 *  
## phiL.1     0.921103    0.044864  20.5309 < 0.00000000000000022 ***
## const.H -318.452781  287.963339  -1.1059             0.2710669    
## phiH.1     0.790283    0.226575   3.4880             0.0006886 ***
## phiH.2    -0.232845    0.363553  -0.6405             0.5231297    
## phiH.3     0.730903    0.216327   3.3787             0.0009924 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Threshold
## Variable: Z(t) = + (0) X(t) + (1) X(t-1)+ (0) X(t-2)
## 
## Value: 1208
# plot(modeloas2)
checkresiduals(ts(modeloas2.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

modeloas3.usd <-
  setar(
    sactnetusd_train,
    mL = 1,
    mH = 1,
    d=1,
    nthresh = 1,
    thDelay = 1,
    type = "level"
  )
## 
##  1 T: Trim not respected:  0.8583333 0.1416667 from th: 1214.61
## Warning: Possible unit root in the high regime. Roots are: 0.7946
## Raiz Unitaria
summary(modeloas3.usd) # residuals variance = 0.006319,  AIC = -621, MAPE = 0.4621%
## 
## Non linear autoregressive model
## 
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
##    const.L     phiL.1 
## 26.5631680  0.9873869 
## 
## High regime:
##     const.H      phiH.1 
## -366.770565    1.258479 
## 
## Threshold:
## -Variable: Z(t) = + (0) X(t)+ (1)X(t-1)
## -Value: 1213
## Proportion of points in low regime: 84.17%    High regime: 15.83% 
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -159.9134  -51.1907   -7.7945   49.2934  246.0615 
## 
## Fit:
## residuals variance = 5365,  AIC = 1058, MAPE = 5.927%
## 
## Coefficient(s):
## 
##            Estimate  Std. Error  t value             Pr(>|t|)    
## const.L   26.563168   32.728557   0.8116              0.41864    
## phiL.1     0.987387    0.033652  29.3408 < 0.0000000000000002 ***
## const.H -366.770565  154.135328  -2.3795              0.01894 *  
## phiH.1     1.258479    0.115936  10.8549 < 0.0000000000000002 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Threshold
## Variable: Z(t) = + (0) X(t) + (1) X(t-1)
## 
## Value: 1213
# plot(modeloas3)
checkresiduals(ts(modeloas3.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

modeloas4.usd <-
  setar(
    sactnetusd_train,
    m = 4,
    mL = 1,
    mH = 4,
    d=2,
    nthresh = 1,
    thDelay = 0,
    type = "level"
  )
## Warning: Possible unit root in the high regime. Roots are: 0.7542 1.3308 1.3308
## 2.3404
## Warning: Possible unit root in the low regime. Roots are: 0.9945
summary(modeloas4.usd) # residuals variance = 0.006319,  AIC = -621, MAPE = 0.4621%
## 
## Non linear autoregressive model
## 
## SETAR model ( 2 regimes)
## Coefficients:
## Low regime:
##   const.L    phiL.1 
## 32.723268  1.005528 
## 
## High regime:
##      const.H       phiH.1       phiH.2       phiH.3       phiH.4 
## -533.2039159    1.2228038   -0.2014876    0.6895297   -0.3198967 
## 
## Threshold:
## -Variable: Z(t) = + (1) X(t)+ (0)X(t-1)+ (0)X(t-2)+ (0)X(t-3)
## -Value: 1182
## Proportion of points in low regime: 75.44%    High regime: 24.56% 
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -249.0256  -67.8389    6.3576   57.1181  275.2895 
## 
## Fit:
## residuals variance = 8450,  AIC = 1119, MAPE = 7.924%
## 
## Coefficient(s):
## 
##            Estimate  Std. Error  t value              Pr(>|t|)    
## const.L   32.723268   48.127028   0.6799             0.4979119    
## phiL.1     1.005528    0.050783  19.8003 < 0.00000000000000022 ***
## const.H -533.203916  232.449479  -2.2938             0.0236151 *  
## phiH.1     1.222804    0.240614   5.0820            0.00000146 ***
## phiH.2    -0.201488    0.216292  -0.9316             0.3535187    
## phiH.3     0.689530    0.203927   3.3813             0.0009865 ***
## phiH.4    -0.319897    0.227650  -1.4052             0.1626545    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Threshold
## Variable: Z(t) = + (1) X(t) + (0) X(t-1)+ (0) X(t-2)+ (0) X(t-3)
## 
## Value: 1182
# plot(modeloas4)
checkresiduals(ts(modeloas4.usd$residuals,start=c(2011,1),frequency = 12))
## Warning in modeldf.default(object): Could not find appropriate degrees of
## freedom for this model.

cbind(
Modelo=c("1.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 2",
         "2.SETAR m = 4,mL = 1, mH = 3, d=2,nthresh = 1,thDelay = 2",
         "3.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 1",
         "4.SETAR m = 4,mL = 1, mH = 4, d=2,nthresh = 1,thDelay = 0"),
AIC=c(
  AIC(modeloas1),
  AIC(modeloas2),
  AIC(modeloas3),
  AIC(modeloas4)
  
),
BIC=c(
  BIC(modeloas1),
  BIC(modeloas2),
  BIC(modeloas3),
  BIC(modeloas4)
  
)
)%>%
  knitr::kable()
Modelo AIC BIC
1.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 2 2664.94192921197 2684.57007652511
2.SETAR m = 4,mL = 1, mH = 3, d=2,nthresh = 1,thDelay = 2 2715.40394269942 2737.83611105729
3.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 1 2667.26111427235 2689.69328263021
4.SETAR m = 4,mL = 1, mH = 4, d=2,nthresh = 1,thDelay = 0 2725.78583996026 2742.60996622866
pronsetar1.usd<- predict(modeloas1.usd, n.ahead = h.param)
pronsetar2.usd<- predict(modeloas2.usd, n.ahead = h.param)
pronsetar3.usd<- predict(modeloas3.usd, n.ahead = h.param)
pronsetar4.usd<- predict(modeloas4.usd, n.ahead = h.param)

fit1.usd<-ts(modeloas1.usd$fitted.values,start =inicio_train,frequency = 12)
fit2.usd<-ts(modeloas2.usd$fitted.values,start =inicio_train,frequency = 12)
fit3.usd<-ts(modeloas3.usd$fitted.values,start =inicio_train,frequency = 12)
fit4.usd<-ts(modeloas4.usd$fitted.values,start =inicio_train,frequency = 12)
MetricasSETARUSD<-data.frame(
  Modelo=rep(
    c("1.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 2",
      "2.SETAR m = 4,mL = 1, mH = 3, d=2,nthresh = 1,thDelay = 2",
      "3.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 1",
      "4.SETAR m = 4,mL = 1, mH = 4, d=2,nthresh = 1,thDelay = 0"),2),
DataSet= c(rep("Entrenamiento",4),rep("Prueba",4)),

rbind(
  getPerformance(fit1.usd,sactnetusd_train),
  getPerformance(fit2.usd,sactnetusd_train),
  getPerformance(fit3.usd,sactnetusd_train),
  getPerformance(fit4.usd,sactnetusd_train),

  getPerformance(pronsetar1.usd,sactnetusd_test),
  getPerformance(pronsetar2.usd,sactnetusd_test),
  getPerformance(pronsetar3.usd,sactnetusd_test),
  getPerformance(pronsetar4.usd,sactnetusd_test)
  ))%>%
  arrange(DataSet,RMSE)

MetricasSETARUSD%>%
  knitr::kable(caption="Metricas de Rendimiento Modelos SETAR")
Metricas de Rendimiento Modelos SETAR
Modelo DataSet MAE RSS MSE RMSE
3.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 1 Entrenamiento 65.24851 867891.56 7113.865 84.34373
1.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 2 Entrenamiento 76.03300 1190732.79 9760.105 98.79324
2.SETAR m = 4,mL = 1, mH = 3, d=2,nthresh = 1,thDelay = 2 Entrenamiento 98.93892 2057277.74 16862.932 129.85735
4.SETAR m = 4,mL = 1, mH = 4, d=2,nthresh = 1,thDelay = 0 Entrenamiento 120.78529 3019923.18 24753.469 157.33235
4.SETAR m = 4,mL = 1, mH = 4, d=2,nthresh = 1,thDelay = 0 Prueba 53.47200 30513.91 6102.783 78.12031
2.SETAR m = 4,mL = 1, mH = 3, d=2,nthresh = 1,thDelay = 2 Prueba 60.85853 42188.91 8437.782 91.85740
1.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 2 Prueba 176.35296 221897.62 44379.524 210.66448
3.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 1 Prueba 195.36421 265812.38 53162.477 230.56990
autoplot(sactnetusd_train)+
  autolayer(fit1.usd)+
  autolayer(fit2.usd)+
  autolayer(fit3.usd)+
  autolayer(fit4.usd)+
  theme_bw()

autoplot(sactnetusd_test)+
  autolayer(pronsetar1.usd)+
  autolayer(pronsetar2.usd)+
  autolayer(pronsetar3.usd)+
  autolayer(pronsetar4.usd)+
  theme_bw()

Metricas Generales
rbind(MetricasTARUSD,
MetricasSETARUSD)%>%
  arrange(DataSet,RMSE)%>%
  knitr::kable(caption="Metricas de Rendimiento Modelos No Lineales Dolares")
Metricas de Rendimiento Modelos No Lineales Dolares
Modelo DataSet MAE RSS MSE RMSE
1. TAR p1=3,p2=4,d=1 Entrenamiento 55.96542 676293.45 5543.389 74.45394
3. TAR p1=1,p2=3,d=1 Entrenamiento 59.12611 711230.72 5829.760 76.35286
2. TAR p1=1,p2=2,d=1 Entrenamiento 59.42877 713711.62 5850.095 76.48592
3.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 1 Entrenamiento 65.24851 867891.56 7113.865 84.34373
1.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 2 Entrenamiento 76.03300 1190732.79 9760.105 98.79324
2.SETAR m = 4,mL = 1, mH = 3, d=2,nthresh = 1,thDelay = 2 Entrenamiento 98.93892 2057277.74 16862.932 129.85735
4.SETAR m = 4,mL = 1, mH = 4, d=2,nthresh = 1,thDelay = 0 Entrenamiento 120.78529 3019923.18 24753.469 157.33235
4.SETAR m = 4,mL = 1, mH = 4, d=2,nthresh = 1,thDelay = 0 Prueba 53.47200 30513.91 6102.783 78.12031
2.SETAR m = 4,mL = 1, mH = 3, d=2,nthresh = 1,thDelay = 2 Prueba 60.85853 42188.91 8437.782 91.85740
3. TAR p1=1,p2=3,d=1 Prueba 121.80264 106912.61 21382.522 146.22764
2. TAR p1=1,p2=2,d=1 Prueba 125.33080 113962.34 22792.469 150.97175
1. TAR p1=3,p2=4,d=1 Prueba 131.18617 124281.26 24856.252 157.65866
1.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 2 Prueba 176.35296 221897.62 44379.524 210.66448
3.SETAR m = 4,mL = 1, mH = 1, d=1,nthresh = 1,thDelay = 1 Prueba 195.36421 265812.38 53162.477 230.56990
autoplot(sactnetusd_test)+
  autolayer(prontar1.usd)+
  autolayer(pronsetar1.usd)+
  theme_bw()

3.2.3. Modelo Machine Learning

# Machine Learning
library(tidymodels)
library(modeltime)
library(modeltime.ensemble)
library(modeltime.resample)
library(timetk)
library(tidyverse)

Serie en Colones

colones%>%
  plot_time_series(Date,value,.facet_ncol = 3, .interactive = F)
DATA PREPARATION
FORECAST_HORIZON <- 5
Full = Training + Forecast Dataset
full_data_tbl <- colones%>%
  select(Date,value)%>%
  future_frame(
    .date_var = Date,
    .length_out = FORECAST_HORIZON,
    .bind_data = T
  )

is.na(full_data_tbl$value)
##   [1] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [13] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [25] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [37] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [49] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [61] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [73] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [85] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
##  [97] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [109] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [121] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [133] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [145] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [157] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [169] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [181] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [193] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [205] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [217] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [229] FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE FALSE
## [241] FALSE FALSE FALSE FALSE FALSE FALSE  TRUE  TRUE  TRUE  TRUE  TRUE
Training Data
data_prepared_tbl <- full_data_tbl[!is.na(full_data_tbl$value),]
  
# data_prepared_tbl%>%
#   tk_summary_diagnostics()
Future Data Forecast
future_tbl <- full_data_tbl[is.na(full_data_tbl$value),]
SPLITTING
splits <- data_prepared_tbl%>%
  arrange(Date)%>%
  time_series_split(
    data_var=Date,
    assess = FORECAST_HORIZON,
    cumulative = T
  )
## Using date_var: Date
splits
## <Analysis/Assess/Total>
## <241/5/246>
PREPROCESOR
recipe_spec_1 <- recipe(value~., training(splits))%>%
  step_timeseries_signature(Date)%>%
  ## Elimina las columnas o atributos que no aportan
  step_rm(matches("(.iso$)|(.xts)|(day)|(hour)|(minute)|(second)|(am.pm)|(week)")) %>%
  step_normalize(Date_index.num,Date_year)%>%
  step_mutate(Date_month = factor(Date_month,ordered = T))%>%
  step_dummy(all_nominal(),one_hot = T)

recipe_spec_1 %>% prep() %>% juice() %>% glimpse()
## Rows: 241
## Columns: 30
## $ Date              <date> 2001-02-01, 2001-03-01, 2001-04-01, 2001-05-01, 200…
## $ value             <dbl> 12637.44, 13569.26, 11895.00, 12882.29, 13393.90, 11…
## $ Date_index.num    <dbl> -1.720507, -1.707312, -1.692703, -1.678565, -1.66395…
## $ Date_year         <dbl> -1.656391, -1.656391, -1.656391, -1.656391, -1.65639…
## $ Date_half         <int> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2…
## $ Date_quarter      <int> 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 1, 1, 1, 2, 2, 2, 3…
## $ Date_month_01     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month_02     <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month_03     <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month_04     <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month_05     <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month_06     <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month_07     <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month_08     <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_09     <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_10     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_11     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_12     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month.lbl_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month.lbl_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month.lbl_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month.lbl_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month.lbl_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month.lbl_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
recipe_spec_2 <- recipe_spec_1%>%
  update_role(Date,new_role = "ID")

recipe_spec_2 %>% prep() %>% juice() %>% glimpse()
## Rows: 241
## Columns: 30
## $ Date              <date> 2001-02-01, 2001-03-01, 2001-04-01, 2001-05-01, 200…
## $ value             <dbl> 12637.44, 13569.26, 11895.00, 12882.29, 13393.90, 11…
## $ Date_index.num    <dbl> -1.720507, -1.707312, -1.692703, -1.678565, -1.66395…
## $ Date_year         <dbl> -1.656391, -1.656391, -1.656391, -1.656391, -1.65639…
## $ Date_half         <int> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2…
## $ Date_quarter      <int> 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 1, 1, 1, 2, 2, 2, 3…
## $ Date_month_01     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month_02     <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month_03     <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month_04     <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month_05     <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month_06     <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month_07     <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month_08     <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_09     <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_10     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_11     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_12     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month.lbl_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month.lbl_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month.lbl_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month.lbl_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month.lbl_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month.lbl_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
recipe_spec_1 %>% prep() %>% summary()
## # A tibble: 30 × 4
##    variable       type    role      source  
##    <chr>          <chr>   <chr>     <chr>   
##  1 Date           date    predictor original
##  2 value          numeric outcome   original
##  3 Date_index.num numeric predictor derived 
##  4 Date_year      numeric predictor derived 
##  5 Date_half      numeric predictor derived 
##  6 Date_quarter   numeric predictor derived 
##  7 Date_month_01  numeric predictor derived 
##  8 Date_month_02  numeric predictor derived 
##  9 Date_month_03  numeric predictor derived 
## 10 Date_month_04  numeric predictor derived 
## # … with 20 more rows
recipe_spec_2 %>% prep() %>% summary()
## # A tibble: 30 × 4
##    variable       type    role      source  
##    <chr>          <chr>   <chr>     <chr>   
##  1 Date           date    ID        original
##  2 value          numeric outcome   original
##  3 Date_index.num numeric predictor derived 
##  4 Date_year      numeric predictor derived 
##  5 Date_half      numeric predictor derived 
##  6 Date_quarter   numeric predictor derived 
##  7 Date_month_01  numeric predictor derived 
##  8 Date_month_02  numeric predictor derived 
##  9 Date_month_03  numeric predictor derived 
## 10 Date_month_04  numeric predictor derived 
## # … with 20 more rows
MODELS
autoarima xgboost
wflw_fit_autoarima_boost <- workflow()%>%
  add_model(
    arima_boost(
    min_n = 2,
    learn_rate = 0.015
) %>%
    set_engine(engine = "auto_arima_xgboost")
  )%>%
  add_recipe(recipe_spec_1)%>%
  fit(training(splits))
## frequency = 12 observations per 1 year
prophet
wflw_fit_prophet <- workflow()%>%
  add_model(
    prophet_reg() %>% set_engine("prophet")
  )%>%
  add_recipe(recipe_spec_1)%>%
  fit(training(splits))
## Disabling weekly seasonality. Run prophet with weekly.seasonality=TRUE to override this.
## Disabling daily seasonality. Run prophet with daily.seasonality=TRUE to override this.
XGBOOST
wflw_fit_xgboost_0_015 <- workflow()%>%
  add_model(
    boost_tree(learn_rate=0.15) %>% set_engine("xgboost")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))

wflw_fit_xgboost_0_1 <- workflow()%>%
  add_model(
    boost_tree(learn_rate=0.1) %>% set_engine("xgboost")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))

wflw_fit_xgboost_0_3 <- workflow()%>%
  add_model(
    boost_tree(learn_rate=0.3) %>% set_engine("xgboost")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))
Random Forest
wflw_fit_rf_1000 <- workflow()%>%
  add_model(
    rand_forest(
                trees = 1000
                ) %>% 
      set_engine("randomForest")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))

wflw_fit_rf_500 <- workflow()%>%
  add_model(
    rand_forest(
                trees = 500
                ) %>% 
      set_engine("randomForest")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))

wflw_fit_rf_200 <- workflow()%>%
  add_model(
    rand_forest(
                trees = 200
                ) %>% 
      set_engine("randomForest")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))
SVM
wflw_fit_svm <- workflow()%>%
  add_model(
    svm_rbf() %>% set_engine("kernlab")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))
prophet_boost
wflw_fit_prophet_boost <- workflow()%>%
  add_model(
    prophet_boost(
      seasonality_yearly = F,
      seasonality_weekly = F,
      seasonality_daily =  F,
    ) %>% 
      set_engine("prophet_xgboost")
  )%>%
  add_recipe(recipe_spec_1)%>%
  fit(training(splits))
MODELTIME WORKFLOW
modeltime table
submodels_tbl <- modeltime_table(
  wflw_fit_autoarima_boost,
  #wflw_fit_prophet, #1
  wflw_fit_prophet_boost, #2
  #wflw_fit_xgboost_0_015, #3
  #wflw_fit_xgboost_0_1, #4
  wflw_fit_xgboost_0_3, #5
  #wflw_fit_rf_1000, #6
  wflw_fit_rf_500 #, #7
  #wflw_fit_rf_200, #8
  #wflw_fit_svm #9
)

submodels_tbl
## # Modeltime Table
## # A tibble: 4 × 3
##   .model_id .model     .model_desc                              
##       <int> <list>     <chr>                                    
## 1         1 <workflow> ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRORS
## 2         2 <workflow> PROPHET W/ XGBOOST ERRORS                
## 3         3 <workflow> XGBOOST                                  
## 4         4 <workflow> RANDOMFOREST
calibrate Testing Data
submodels_calibrated_tbl <- submodels_tbl %>%
  modeltime_calibrate(testing(splits))

submodels_calibrated_tbl
## # Modeltime Table
## # A tibble: 4 × 5
##   .model_id .model     .model_desc                        .type .calibration_da…
##       <int> <list>     <chr>                              <chr> <list>          
## 1         1 <workflow> ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOS… Test  <tibble [5 × 4]>
## 2         2 <workflow> PROPHET W/ XGBOOST ERRORS          Test  <tibble [5 × 4]>
## 3         3 <workflow> XGBOOST                            Test  <tibble [5 × 4]>
## 4         4 <workflow> RANDOMFOREST                       Test  <tibble [5 × 4]>
Measure Test Accuracy
submodels_calibrated_tbl%>% 
  modeltime_accuracy()%>%
  arrange(rmse)
## # A tibble: 4 × 9
##   .model_id .model_desc              .type    mae  mape  mase smape   rmse   rsq
##       <int> <chr>                    <chr>  <dbl> <dbl> <dbl> <dbl>  <dbl> <dbl>
## 1         1 ARIMA(1,1,2)(2,0,0)[12]… Test  32499.  3.20 0.568  3.21 37479. 0.876
## 2         2 PROPHET W/ XGBOOST ERRO… Test  52131.  5.27 0.911  5.21 64436. 0.963
## 3         3 XGBOOST                  Test  46464.  4.47 0.812  4.67 66341. 0.521
## 4         4 RANDOMFOREST             Test  58366.  5.61 1.02   5.84 71947. 0.923
Visualize test forecast
submodels_calibrated_tbl %>%
  modeltime_forecast(
    new_data = testing(splits),
    actual_data = data_prepared_tbl,
    keep_data = T
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
Refit on full training dataset
submodels_refit_tbl <- submodels_calibrated_tbl %>%
  modeltime_refit(data_prepared_tbl)
## frequency = 12 observations per 1 year
Visualize Submodel Forecast
submodels_refit_tbl%>%
  modeltime_forecast(
    new_data =  future_tbl,
    actual_data = data_prepared_tbl,
    keep_data = T
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
Validación cruzada

https://cran.r-project.org/web/packages/modeltime.resample/vignettes/getting-started.html

resamples_tscv <- time_series_cv(
    data        = data_prepared_tbl,
    date_var    = Date,
    assess      = FORECAST_HORIZON,
    initial     = "36 month",
    skip        = FORECAST_HORIZON,
    slice_limit = 5
)

resamples_tscv
## # Time Series Cross Validation Plan 
## # A tibble: 5 × 2
##   splits         id    
##   <list>         <chr> 
## 1 <split [36/5]> Slice1
## 2 <split [36/5]> Slice2
## 3 <split [36/5]> Slice3
## 4 <split [36/5]> Slice4
## 5 <split [36/5]> Slice5
resamples_tscv %>%
    tk_time_series_cv_plan() %>%
    plot_time_series_cv_plan(Date, 
                             value, 
                             .facet_ncol = 2,
                             .interactive = T)

Generate Resample Predictions

resamples_fitted <- submodels_tbl %>%
    modeltime_fit_resamples(
        resamples = resamples_tscv,
        control   = control_resamples(verbose = FALSE)
    )

resamples_fitted
## # Modeltime Table
## # A tibble: 4 × 4
##   .model_id .model     .model_desc                              .resample_resul…
##       <int> <list>     <chr>                                    <list>          
## 1         1 <workflow> ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRO… <rsmp[+]>       
## 2         2 <workflow> PROPHET W/ XGBOOST ERRORS                <rsmp[+]>       
## 3         3 <workflow> XGBOOST                                  <rsmp[+]>       
## 4         4 <workflow> RANDOMFOREST                             <rsmp[+]>

Evaluate the Results

resamples_fitted %>%
    plot_modeltime_resamples(
      .point_size  = 3, 
      .point_alpha = 0.8,
      .interactive = T
    )
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
resamples_fitted %>%
    modeltime_resample_accuracy(summary_fns = mean) %>%
    table_modeltime_accuracy(.interactive = T)
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
ENSEMBLE
Ensamble Media y Meta-Learner
ensemble_fit_mean <- submodels_tbl %>%
  #filter(!.model_id %in% c(1))%>%
  ensemble_average(type="mean")


ensemble_fit_lm <- resamples_fitted %>%
  ensemble_model_spec(
    model_spec = linear_reg(
      penalty = tune(),
      mixture = tune()
    ) %>%
      set_engine("glmnet"),
    grid = 2,
    control = control_grid(verbose = TRUE)
  )
## ── Tuning Model Specification ───────────────────────────────────
## ℹ Performing 5-Fold Cross Validation.
## i Fold1: preprocessor 1/1
## ✓ Fold1: preprocessor 1/1
## i Fold1: preprocessor 1/1, model 1/2
## ✓ Fold1: preprocessor 1/1, model 1/2
## i Fold1: preprocessor 1/1, model 1/2 (predictions)
## i Fold1: preprocessor 1/1, model 2/2
## ✓ Fold1: preprocessor 1/1, model 2/2
## i Fold1: preprocessor 1/1, model 2/2 (predictions)
## i Fold2: preprocessor 1/1
## ✓ Fold2: preprocessor 1/1
## i Fold2: preprocessor 1/1, model 1/2
## ✓ Fold2: preprocessor 1/1, model 1/2
## i Fold2: preprocessor 1/1, model 1/2 (predictions)
## i Fold2: preprocessor 1/1, model 2/2
## ✓ Fold2: preprocessor 1/1, model 2/2
## i Fold2: preprocessor 1/1, model 2/2 (predictions)
## i Fold3: preprocessor 1/1
## ✓ Fold3: preprocessor 1/1
## i Fold3: preprocessor 1/1, model 1/2
## ✓ Fold3: preprocessor 1/1, model 1/2
## i Fold3: preprocessor 1/1, model 1/2 (predictions)
## i Fold3: preprocessor 1/1, model 2/2
## ✓ Fold3: preprocessor 1/1, model 2/2
## i Fold3: preprocessor 1/1, model 2/2 (predictions)
## i Fold4: preprocessor 1/1
## ✓ Fold4: preprocessor 1/1
## i Fold4: preprocessor 1/1, model 1/2
## ✓ Fold4: preprocessor 1/1, model 1/2
## i Fold4: preprocessor 1/1, model 1/2 (predictions)
## i Fold4: preprocessor 1/1, model 2/2
## ✓ Fold4: preprocessor 1/1, model 2/2
## i Fold4: preprocessor 1/1, model 2/2 (predictions)
## i Fold5: preprocessor 1/1
## ✓ Fold5: preprocessor 1/1
## i Fold5: preprocessor 1/1, model 1/2
## ✓ Fold5: preprocessor 1/1, model 1/2
## i Fold5: preprocessor 1/1, model 1/2 (predictions)
## i Fold5: preprocessor 1/1, model 2/2
## ✓ Fold5: preprocessor 1/1, model 2/2
## i Fold5: preprocessor 1/1, model 2/2 (predictions)
## ✓ Finished tuning Model Specification.
## ℹ Model Parameters:
## # A tibble: 1 × 8
##   penalty mixture .metric .estimator   mean     n std_err .config             
##     <dbl>   <dbl> <chr>   <chr>       <dbl> <int>   <dbl> <chr>               
## 1 0.00116  0.0746 rmse    standard   95318.     5  14276. Preprocessor1_Model1
## ℹ Prediction Error Comparison:
## # A tibble: 5 × 3
##   .model_id    rmse .model_desc                              
##   <chr>       <dbl> <chr>                                    
## 1 1         128924. ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRORS
## 2 2         116572. PROPHET W/ XGBOOST ERRORS                
## 3 3         121334. XGBOOST                                  
## 4 4         126479. RANDOMFOREST                             
## 5 ensemble   86861. ENSEMBLE (MODEL SPEC)                    
## 
## ── Final Model ──────────────────────────────────────────────────
## ℹ Model Workflow:
## ══ Workflow [trained] ══════════════════════════════════════════════════════════
## Preprocessor: Recipe
## Model: linear_reg()
## 
## ── Preprocessor ────────────────────────────────────────────────────────────────
## 0 Recipe Steps
## 
## ── Model ───────────────────────────────────────────────────────────────────────
## 
## Call:  glmnet::glmnet(x = maybe_matrix(x), y = y, family = "gaussian",      alpha = ~0.0745552129927091) 
## 
##     Df  %Dev  Lambda
## 1    0  0.00 1348000
## 2    3  2.00 1228000
## 3    4  4.77 1119000
## 4    4  7.76 1020000
## 5    4 10.72  929000
## 6    4 13.65  846400
## 7    4 16.53  771200
## 8    4 19.33  702700
## 9    4 22.04  640300
## 10   4 24.66  583400
## 11   4 27.16  531600
## 12   4 29.54  484400
## 13   4 31.79  441300
## 14   4 33.91  402100
## 15   4 35.88  366400
## 16   4 37.71  333900
## 17   4 39.40  304200
## 18   4 40.95  277200
## 19   4 42.36  252500
## 20   4 43.65  230100
## 21   4 44.82  209700
## 22   4 45.87  191000
## 23   4 46.81  174100
## 24   4 47.65  158600
## 25   4 48.41  144500
## 26   4 49.08  131700
## 27   4 49.69  120000
## 28   4 50.22  109300
## 29   4 50.70   99610
## 30   4 51.14   90760
## 31   4 51.52   82700
## 32   4 51.87   75350
## 33   4 52.19   68660
## 34   4 52.48   62560
## 35   4 52.75   57000
## 36   4 53.00   51940
## 37   4 53.24   47320
## 38   4 53.46   43120
## 39   4 53.67   39290
## 40   4 53.88   35800
## 41   4 54.07   32620
## 42   3 54.26   29720
## 43   3 54.36   27080
## 44   3 54.45   24670
## 45   3 54.53   22480
## 46   3 54.61   20480
## 
## ...
## and 54 more lines.
## 
## 6.911 sec elapsed
ensemble_fit_xg<- resamples_fitted %>%
  ensemble_model_spec(
    model_spec = boost_tree(
      mtry=tune(),
      trees=tune(),
      learn_rate = tune()
    ) %>% set_engine("xgboost"),
    control = control_grid(verbose = TRUE)
  )
## ── Tuning Model Specification ───────────────────────────────────
## ℹ Performing 5-Fold Cross Validation.
## i Creating pre-processing data to finalize unknown parameter: mtry
## i Fold1: preprocessor 1/1
## ✓ Fold1: preprocessor 1/1
## i Fold1: preprocessor 1/1, model 1/6
## ✓ Fold1: preprocessor 1/1, model 1/6
## i Fold1: preprocessor 1/1, model 1/6 (predictions)
## i Fold1: preprocessor 1/1, model 2/6
## ✓ Fold1: preprocessor 1/1, model 2/6
## i Fold1: preprocessor 1/1, model 2/6 (predictions)
## i Fold1: preprocessor 1/1, model 3/6
## ✓ Fold1: preprocessor 1/1, model 3/6
## i Fold1: preprocessor 1/1, model 3/6 (predictions)
## i Fold1: preprocessor 1/1, model 4/6
## ✓ Fold1: preprocessor 1/1, model 4/6
## i Fold1: preprocessor 1/1, model 4/6 (predictions)
## i Fold1: preprocessor 1/1, model 5/6
## ✓ Fold1: preprocessor 1/1, model 5/6
## i Fold1: preprocessor 1/1, model 5/6 (predictions)
## i Fold1: preprocessor 1/1, model 6/6
## ✓ Fold1: preprocessor 1/1, model 6/6
## i Fold1: preprocessor 1/1, model 6/6 (predictions)
## i Fold2: preprocessor 1/1
## ✓ Fold2: preprocessor 1/1
## i Fold2: preprocessor 1/1, model 1/6
## ✓ Fold2: preprocessor 1/1, model 1/6
## i Fold2: preprocessor 1/1, model 1/6 (predictions)
## i Fold2: preprocessor 1/1, model 2/6
## ✓ Fold2: preprocessor 1/1, model 2/6
## i Fold2: preprocessor 1/1, model 2/6 (predictions)
## i Fold2: preprocessor 1/1, model 3/6
## ✓ Fold2: preprocessor 1/1, model 3/6
## i Fold2: preprocessor 1/1, model 3/6 (predictions)
## i Fold2: preprocessor 1/1, model 4/6
## ✓ Fold2: preprocessor 1/1, model 4/6
## i Fold2: preprocessor 1/1, model 4/6 (predictions)
## i Fold2: preprocessor 1/1, model 5/6
## ✓ Fold2: preprocessor 1/1, model 5/6
## i Fold2: preprocessor 1/1, model 5/6 (predictions)
## i Fold2: preprocessor 1/1, model 6/6
## ✓ Fold2: preprocessor 1/1, model 6/6
## i Fold2: preprocessor 1/1, model 6/6 (predictions)
## i Fold3: preprocessor 1/1
## ✓ Fold3: preprocessor 1/1
## i Fold3: preprocessor 1/1, model 1/6
## ✓ Fold3: preprocessor 1/1, model 1/6
## i Fold3: preprocessor 1/1, model 1/6 (predictions)
## i Fold3: preprocessor 1/1, model 2/6
## ✓ Fold3: preprocessor 1/1, model 2/6
## i Fold3: preprocessor 1/1, model 2/6 (predictions)
## i Fold3: preprocessor 1/1, model 3/6
## ✓ Fold3: preprocessor 1/1, model 3/6
## i Fold3: preprocessor 1/1, model 3/6 (predictions)
## i Fold3: preprocessor 1/1, model 4/6
## ✓ Fold3: preprocessor 1/1, model 4/6
## i Fold3: preprocessor 1/1, model 4/6 (predictions)
## i Fold3: preprocessor 1/1, model 5/6
## ✓ Fold3: preprocessor 1/1, model 5/6
## i Fold3: preprocessor 1/1, model 5/6 (predictions)
## i Fold3: preprocessor 1/1, model 6/6
## ✓ Fold3: preprocessor 1/1, model 6/6
## i Fold3: preprocessor 1/1, model 6/6 (predictions)
## i Fold4: preprocessor 1/1
## ✓ Fold4: preprocessor 1/1
## i Fold4: preprocessor 1/1, model 1/6
## ✓ Fold4: preprocessor 1/1, model 1/6
## i Fold4: preprocessor 1/1, model 1/6 (predictions)
## i Fold4: preprocessor 1/1, model 2/6
## ✓ Fold4: preprocessor 1/1, model 2/6
## i Fold4: preprocessor 1/1, model 2/6 (predictions)
## i Fold4: preprocessor 1/1, model 3/6
## ✓ Fold4: preprocessor 1/1, model 3/6
## i Fold4: preprocessor 1/1, model 3/6 (predictions)
## i Fold4: preprocessor 1/1, model 4/6
## ✓ Fold4: preprocessor 1/1, model 4/6
## i Fold4: preprocessor 1/1, model 4/6 (predictions)
## i Fold4: preprocessor 1/1, model 5/6
## ✓ Fold4: preprocessor 1/1, model 5/6
## i Fold4: preprocessor 1/1, model 5/6 (predictions)
## i Fold4: preprocessor 1/1, model 6/6
## ✓ Fold4: preprocessor 1/1, model 6/6
## i Fold4: preprocessor 1/1, model 6/6 (predictions)
## i Fold5: preprocessor 1/1
## ✓ Fold5: preprocessor 1/1
## i Fold5: preprocessor 1/1, model 1/6
## ✓ Fold5: preprocessor 1/1, model 1/6
## i Fold5: preprocessor 1/1, model 1/6 (predictions)
## i Fold5: preprocessor 1/1, model 2/6
## ✓ Fold5: preprocessor 1/1, model 2/6
## i Fold5: preprocessor 1/1, model 2/6 (predictions)
## i Fold5: preprocessor 1/1, model 3/6
## ✓ Fold5: preprocessor 1/1, model 3/6
## i Fold5: preprocessor 1/1, model 3/6 (predictions)
## i Fold5: preprocessor 1/1, model 4/6
## ✓ Fold5: preprocessor 1/1, model 4/6
## i Fold5: preprocessor 1/1, model 4/6 (predictions)
## i Fold5: preprocessor 1/1, model 5/6
## ✓ Fold5: preprocessor 1/1, model 5/6
## i Fold5: preprocessor 1/1, model 5/6 (predictions)
## i Fold5: preprocessor 1/1, model 6/6
## ✓ Fold5: preprocessor 1/1, model 6/6
## i Fold5: preprocessor 1/1, model 6/6 (predictions)
## ✓ Finished tuning Model Specification.
## ℹ Model Parameters:
## # A tibble: 1 × 9
##    mtry trees learn_rate .metric .estimator   mean     n std_err .config        
##   <int> <int>      <dbl> <chr>   <chr>       <dbl> <int>   <dbl> <chr>          
## 1     2   237     0.0310 rmse    standard   82503.     5   5758. Preprocessor1_…
## ℹ Prediction Error Comparison:
## # A tibble: 5 × 3
##   .model_id    rmse .model_desc                              
##   <chr>       <dbl> <chr>                                    
## 1 1         128924. ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRORS
## 2 2         116572. PROPHET W/ XGBOOST ERRORS                
## 3 3         121334. XGBOOST                                  
## 4 4         126479. RANDOMFOREST                             
## 5 ensemble    6460. ENSEMBLE (MODEL SPEC)                    
## 
## ── Final Model ──────────────────────────────────────────────────
## ℹ Model Workflow:
## ══ Workflow [trained] ══════════════════════════════════════════════════════════
## Preprocessor: Recipe
## Model: boost_tree()
## 
## ── Preprocessor ────────────────────────────────────────────────────────────────
## 0 Recipe Steps
## 
## ── Model ───────────────────────────────────────────────────────────────────────
## ##### xgb.Booster
## raw: 307.9 Kb 
## call:
##   xgboost::xgb.train(params = list(eta = 0.0310189818351432, max_depth = 6, 
##     gamma = 0, colsample_bytree = 1, colsample_bynode = 0.5, 
##     min_child_weight = 1, subsample = 1, objective = "reg:squarederror"), 
##     data = x$data, nrounds = 237L, watchlist = x$watchlist, verbose = 0, 
##     nthread = 1)
## params (as set within xgb.train):
##   eta = "0.0310189818351432", max_depth = "6", gamma = "0", colsample_bytree = "1", colsample_bynode = "0.5", min_child_weight = "1", subsample = "1", objective = "reg:squarederror", nthread = "1", validate_parameters = "TRUE"
## xgb.attributes:
##   niter
## callbacks:
##   cb.evaluation.log()
## # of features: 4 
## niter: 237
## nfeatures : 4 
## evaluation_log:
##     iter training_rmse
##        1    906700.312
##        2    880271.438
## ---                   
##      236      6567.214
##      237      6459.876
## 
## 3243.345 sec elapsed
ensemble_tbl<- modeltime_table(
  ensemble_fit_mean,
  ensemble_fit_lm,
  ensemble_fit_xg
)
Ensemble test Accuracy
ensemble_tbl%>%
  combine_modeltime_tables(submodels_tbl)%>%
  modeltime_accuracy(testing(splits))%>%
  arrange(rmse,mae,mape,mase)%>%
  knitr::kable()
.model_id .model_desc .type mae mape mase smape rmse rsq
4 ARIMA(1,1,2)(2,0,0)[12] W/ XGBOOST ERRORS Test 32498.55 3.200357 0.5677113 3.206721 37479.12 0.8764449
1 ENSEMBLE (MEAN): 4 MODELS Test 31310.58 2.929882 0.5469589 3.025890 49992.18 0.8392449
3 ENSEMBLE (XGBOOST STACK): 4 MODELS Test 55709.96 5.446423 0.9731873 5.593512 62790.39 0.8177650
2 ENSEMBLE (GLMNET STACK): 4 MODELS Test 53531.57 5.231201 0.9351335 5.281428 63683.65 0.3142295
5 PROPHET W/ XGBOOST ERRORS Test 52131.10 5.268918 0.9106689 5.210271 64435.97 0.9630310
6 XGBOOST Test 46463.83 4.473893 0.8116684 4.667059 66341.33 0.5212406
7 RANDOMFOREST Test 58365.82 5.610125 1.0195821 5.842129 71946.82 0.9234213
Ensemble Test Forecast
ensemble_tbl%>%
  modeltime_calibrate(testing(splits))%>%
  modeltime_forecast(
    new_data =  testing(splits),
    actual_data = data_prepared_tbl,
    keep_data = T,
    conf_by_id = T,
    conf_interval = 0.95
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
## Warning: The 'id' column in calibration data was not detected. Global Confidence
## Interval is being returned.
ensemble_tbl%>%
  combine_modeltime_tables(submodels_tbl)%>%
  modeltime_calibrate(testing(splits))%>%
  modeltime_forecast(
    new_data =  testing(splits),
    actual_data = data_prepared_tbl,
    keep_data = T
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
ensemble_tbl_all_model<-ensemble_tbl%>%
  combine_modeltime_tables(submodels_tbl)
Refit Ensemble
ensemble_refit_tbl <- ensemble_tbl%>%
  modeltime_refit(data_prepared_tbl)
## frequency = 12 observations per 1 year
## Warning in mdl_time_refit.mdl_time_ensemble_model_spec(...): 'resamples' not
## provided during refitting. Submodels will be refit, but the meta-learner will
## *not* be refit. You can provide 'resamples' via `modeltime_refit(object, data,
## resamples, control)`. Proceeding by refitting the submodels only.
## frequency = 12 observations per 1 year
## Warning in mdl_time_refit.mdl_time_ensemble_model_spec(...): 'resamples' not
## provided during refitting. Submodels will be refit, but the meta-learner will
## *not* be refit. You can provide 'resamples' via `modeltime_refit(object, data,
## resamples, control)`. Proceeding by refitting the submodels only.
## frequency = 12 observations per 1 year
Visualize Ensemble Forecast
ensemble_refit_tbl%>%
  modeltime_forecast(
    new_data =  future_tbl,
    actual_data = data_prepared_tbl,
    keep_data = T
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
## Warning: Expecting the following names to be in the data frame: .conf_hi, .conf_lo. 
## Proceeding with '.conf_interval_show = FALSE' to visualize the forecast without confidence intervals.
## Alternatively, try using `modeltime_calibrate()` before forecasting to add confidence intervals.

Serie en Dolares

DATA
dolares%>%
  plot_time_series(Date,value,.facet_ncol = 3, .interactive = F)

DATA PREPARATION
FORECAST_HORIZON <- 5
Full = Training + Forecast Dataset
full_data_tbl <- dolares%>%
  select(Date,value)%>%
  future_frame(
    .date_var = Date,
    .length_out = FORECAST_HORIZON,
    .bind_data = T
  )
Training Data
data_prepared_tbl <- full_data_tbl[!is.na(full_data_tbl$value),]
  
# data_prepared_tbl%>%
#   tk_summary_diagnostics()
Future Data Forecast
future_tbl <- full_data_tbl[is.na(full_data_tbl$value),]
SPLITTING
splits <- data_prepared_tbl%>%
  arrange(Date)%>%
  time_series_split(
    data_var=Date,
    assess = FORECAST_HORIZON,
    cumulative = T
  )
## Using date_var: Date
splits
## <Analysis/Assess/Total>
## <241/5/246>
PREPROCESOR
recipe_spec_1 <- recipe(value~., training(splits))%>%
  step_timeseries_signature(Date)%>%
  ## Elimina las columnas o atributos que no aportan
  step_rm(matches("(.iso$)|(.xts)|(day)|(hour)|(minute)|(second)|(am.pm)|(week)")) %>%
  step_normalize(Date_index.num,Date_year)%>%
  step_mutate(Date_month = factor(Date_month,ordered = T))%>%
  step_dummy(all_nominal(),one_hot = T)

recipe_spec_1 %>% prep() %>% juice() %>% glimpse()
## Rows: 241
## Columns: 30
## $ Date              <date> 2001-02-01, 2001-03-01, 2001-04-01, 2001-05-01, 200…
## $ value             <dbl> 21.8817, 24.1889, 24.6323, 30.7223, 30.6749, 31.9302…
## $ Date_index.num    <dbl> -1.720507, -1.707312, -1.692703, -1.678565, -1.66395…
## $ Date_year         <dbl> -1.656391, -1.656391, -1.656391, -1.656391, -1.65639…
## $ Date_half         <int> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2…
## $ Date_quarter      <int> 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 1, 1, 1, 2, 2, 2, 3…
## $ Date_month_01     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month_02     <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month_03     <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month_04     <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month_05     <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month_06     <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month_07     <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month_08     <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_09     <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_10     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_11     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_12     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month.lbl_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month.lbl_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month.lbl_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month.lbl_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month.lbl_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month.lbl_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
recipe_spec_2 <- recipe_spec_1%>%
  update_role(Date,new_role = "ID")

recipe_spec_2 %>% prep() %>% juice() %>% glimpse()
## Rows: 241
## Columns: 30
## $ Date              <date> 2001-02-01, 2001-03-01, 2001-04-01, 2001-05-01, 200…
## $ value             <dbl> 21.8817, 24.1889, 24.6323, 30.7223, 30.6749, 31.9302…
## $ Date_index.num    <dbl> -1.720507, -1.707312, -1.692703, -1.678565, -1.66395…
## $ Date_year         <dbl> -1.656391, -1.656391, -1.656391, -1.656391, -1.65639…
## $ Date_half         <int> 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2…
## $ Date_quarter      <int> 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 1, 1, 1, 2, 2, 2, 3…
## $ Date_month_01     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month_02     <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month_03     <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month_04     <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month_05     <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month_06     <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month_07     <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month_08     <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_09     <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_10     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_11     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month_12     <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_01 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_02 <dbl> 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0…
## $ Date_month.lbl_03 <dbl> 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0…
## $ Date_month.lbl_04 <dbl> 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0…
## $ Date_month.lbl_05 <dbl> 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0…
## $ Date_month.lbl_06 <dbl> 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0…
## $ Date_month.lbl_07 <dbl> 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1…
## $ Date_month.lbl_08 <dbl> 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_09 <dbl> 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_10 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_11 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0…
## $ Date_month.lbl_12 <dbl> 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0…
recipe_spec_1 %>% prep() %>% summary()
## # A tibble: 30 × 4
##    variable       type    role      source  
##    <chr>          <chr>   <chr>     <chr>   
##  1 Date           date    predictor original
##  2 value          numeric outcome   original
##  3 Date_index.num numeric predictor derived 
##  4 Date_year      numeric predictor derived 
##  5 Date_half      numeric predictor derived 
##  6 Date_quarter   numeric predictor derived 
##  7 Date_month_01  numeric predictor derived 
##  8 Date_month_02  numeric predictor derived 
##  9 Date_month_03  numeric predictor derived 
## 10 Date_month_04  numeric predictor derived 
## # … with 20 more rows
recipe_spec_2 %>% prep() %>% summary()
## # A tibble: 30 × 4
##    variable       type    role      source  
##    <chr>          <chr>   <chr>     <chr>   
##  1 Date           date    ID        original
##  2 value          numeric outcome   original
##  3 Date_index.num numeric predictor derived 
##  4 Date_year      numeric predictor derived 
##  5 Date_half      numeric predictor derived 
##  6 Date_quarter   numeric predictor derived 
##  7 Date_month_01  numeric predictor derived 
##  8 Date_month_02  numeric predictor derived 
##  9 Date_month_03  numeric predictor derived 
## 10 Date_month_04  numeric predictor derived 
## # … with 20 more rows
MODELS
autoarima xgboost
wflw_fit_autoarima_boost <- workflow()%>%
  add_model(
    arima_boost(
    min_n = 2,
    learn_rate = 0.015
) %>%
    set_engine(engine = "auto_arima_xgboost")
  )%>%
  add_recipe(recipe_spec_1)%>%
  fit(training(splits))
## frequency = 12 observations per 1 year
prophet
wflw_fit_prophet <- workflow()%>%
  add_model(
    prophet_reg() %>% set_engine("prophet")
  )%>%
  add_recipe(recipe_spec_1)%>%
  fit(training(splits))
## Disabling weekly seasonality. Run prophet with weekly.seasonality=TRUE to override this.
## Disabling daily seasonality. Run prophet with daily.seasonality=TRUE to override this.
XGBOOST
wflw_fit_xgboost_0_015 <- workflow()%>%
  add_model(
    boost_tree(learn_rate=0.15) %>% set_engine("xgboost")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))

wflw_fit_xgboost_0_1 <- workflow()%>%
  add_model(
    boost_tree(learn_rate=0.1) %>% set_engine("xgboost")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))

wflw_fit_xgboost_0_3 <- workflow()%>%
  add_model(
    boost_tree(learn_rate=0.3) %>% set_engine("xgboost")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))
Random Forest
wflw_fit_rf_1000 <- workflow()%>%
  add_model(
    rand_forest(
                trees = 1000
                ) %>% 
      set_engine("randomForest")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))

wflw_fit_rf_500 <- workflow()%>%
  add_model(
    rand_forest(
                trees = 500
                ) %>% 
      set_engine("randomForest")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))

wflw_fit_rf_200 <- workflow()%>%
  add_model(
    rand_forest(
                trees = 200
                ) %>% 
      set_engine("randomForest")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))
SVM
wflw_fit_svm <- workflow()%>%
  add_model(
    svm_rbf() %>% set_engine("kernlab")
  )%>%
  add_recipe(recipe_spec_2)%>%
  fit(training(splits))
prophet_boost
wflw_fit_prophet_boost <- workflow()%>%
  add_model(
    prophet_boost(
      seasonality_yearly = F,
      seasonality_weekly = F,
      seasonality_daily =  F,
    ) %>% 
      set_engine("prophet_xgboost")
  )%>%
  add_recipe(recipe_spec_1)%>%
  fit(training(splits))
MODELTIME WORKFLOW
modeltime table
submodels_tbl <- modeltime_table(
  wflw_fit_autoarima_boost,
  #wflw_fit_prophet, #1
  wflw_fit_prophet_boost, #2
  #wflw_fit_xgboost_0_015, #3
  #wflw_fit_xgboost_0_1, #4
  wflw_fit_xgboost_0_3, #5
  #wflw_fit_rf_1000, #6
  wflw_fit_rf_500 #, #7
  #wflw_fit_rf_200, #8
  #wflw_fit_svm #9
)

submodels_tbl
## # Modeltime Table
## # A tibble: 4 × 3
##   .model_id .model     .model_desc                                         
##       <int> <list>     <chr>                                               
## 1         1 <workflow> ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ XGBOOST ERRORS
## 2         2 <workflow> PROPHET W/ XGBOOST ERRORS                           
## 3         3 <workflow> XGBOOST                                             
## 4         4 <workflow> RANDOMFOREST
calibrate Testing Data
submodels_calibrated_tbl <- submodels_tbl %>%
  modeltime_calibrate(testing(splits))

submodels_calibrated_tbl
## # Modeltime Table
## # A tibble: 4 × 5
##   .model_id .model     .model_desc                        .type .calibration_da…
##       <int> <list>     <chr>                              <chr> <list>          
## 1         1 <workflow> ARIMA(0,1,0)(1,0,0)[12] WITH DRIF… Test  <tibble [5 × 4]>
## 2         2 <workflow> PROPHET W/ XGBOOST ERRORS          Test  <tibble [5 × 4]>
## 3         3 <workflow> XGBOOST                            Test  <tibble [5 × 4]>
## 4         4 <workflow> RANDOMFOREST                       Test  <tibble [5 × 4]>
Measure Test Accuracy
submodels_calibrated_tbl%>% 
  modeltime_accuracy()%>%
  arrange(rmse)
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
## # A tibble: 4 × 9
##   .model_id .model_desc              .type   mae  mape  mase smape  rmse     rsq
##       <int> <chr>                    <chr> <dbl> <dbl> <dbl> <dbl> <dbl>   <dbl>
## 1         1 ARIMA(0,1,0)(1,0,0)[12]… Test   66.6  3.76 0.774  3.81  75.8  0.792 
## 2         2 PROPHET W/ XGBOOST ERRO… Test  118.   6.53 1.37   6.83 141.   0.0919
## 3         3 XGBOOST                  Test  168.   9.34 1.95   9.93 192.  NA     
## 4         4 RANDOMFOREST             Test  316.  17.8  3.67  19.7  325.   0.606
Visualize test forecast
submodels_calibrated_tbl %>%
  modeltime_forecast(
    new_data = testing(splits),
    actual_data = data_prepared_tbl,
    keep_data = T
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
Refit on full training dataset
submodels_refit_tbl <- submodels_calibrated_tbl %>%
  modeltime_refit(data_prepared_tbl)
## frequency = 12 observations per 1 year
Visualize Submodel Forecast
submodels_refit_tbl%>%
  modeltime_forecast(
    new_data =  future_tbl,
    actual_data = data_prepared_tbl,
    keep_data = T
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
Validación cruzada

https://cran.r-project.org/web/packages/modeltime.resample/vignettes/getting-started.html

resamples_tscv <- time_series_cv(
    data        = data_prepared_tbl,
    date_var    = Date,
    assess      = FORECAST_HORIZON,
    initial     = "36 month",
    skip        = FORECAST_HORIZON,
    slice_limit = 5
)

resamples_tscv
## # Time Series Cross Validation Plan 
## # A tibble: 5 × 2
##   splits         id    
##   <list>         <chr> 
## 1 <split [36/5]> Slice1
## 2 <split [36/5]> Slice2
## 3 <split [36/5]> Slice3
## 4 <split [36/5]> Slice4
## 5 <split [36/5]> Slice5
resamples_tscv %>%
    tk_time_series_cv_plan() %>%
    plot_time_series_cv_plan(Date, 
                             value, 
                             .facet_ncol = 2,
                             .interactive = T)

Generate Resample Predictions

resamples_fitted <- submodels_tbl %>%
    modeltime_fit_resamples(
        resamples = resamples_tscv,
        control   = control_resamples(verbose = FALSE)
    )

resamples_fitted
## # Modeltime Table
## # A tibble: 4 × 4
##   .model_id .model     .model_desc                              .resample_resul…
##       <int> <list>     <chr>                                    <list>          
## 1         1 <workflow> ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ X… <rsmp[+]>       
## 2         2 <workflow> PROPHET W/ XGBOOST ERRORS                <rsmp[+]>       
## 3         3 <workflow> XGBOOST                                  <rsmp[+]>       
## 4         4 <workflow> RANDOMFOREST                             <rsmp[+]>

Evaluate the Results

resamples_fitted %>%
    plot_modeltime_resamples(
      .point_size  = 3, 
      .point_alpha = 0.8,
      .interactive = T
    )
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
resamples_fitted %>%
    modeltime_resample_accuracy(summary_fns = mean) %>%
    table_modeltime_accuracy(.interactive = T)
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
ENSEMBLE
Ensamble Media y Meta-Learner
ensemble_fit_mean <- submodels_tbl %>%
  #filter(!.model_id %in% c(1))%>%
  ensemble_average(type="mean")


ensemble_fit_lm <- resamples_fitted %>%
  ensemble_model_spec(
    model_spec = linear_reg(
      penalty = tune(),
      mixture = tune()
    ) %>%
      set_engine("glmnet"),
    grid = 2,
    control = control_grid(verbose = TRUE)
  )
## ── Tuning Model Specification ───────────────────────────────────
## ℹ Performing 5-Fold Cross Validation.
## i Fold1: preprocessor 1/1
## ✓ Fold1: preprocessor 1/1
## i Fold1: preprocessor 1/1, model 1/2
## ✓ Fold1: preprocessor 1/1, model 1/2
## i Fold1: preprocessor 1/1, model 1/2 (predictions)
## i Fold1: preprocessor 1/1, model 2/2
## ✓ Fold1: preprocessor 1/1, model 2/2
## i Fold1: preprocessor 1/1, model 2/2 (predictions)
## i Fold2: preprocessor 1/1
## ✓ Fold2: preprocessor 1/1
## i Fold2: preprocessor 1/1, model 1/2
## ✓ Fold2: preprocessor 1/1, model 1/2
## i Fold2: preprocessor 1/1, model 1/2 (predictions)
## i Fold2: preprocessor 1/1, model 2/2
## ✓ Fold2: preprocessor 1/1, model 2/2
## i Fold2: preprocessor 1/1, model 2/2 (predictions)
## i Fold3: preprocessor 1/1
## ✓ Fold3: preprocessor 1/1
## i Fold3: preprocessor 1/1, model 1/2
## ✓ Fold3: preprocessor 1/1, model 1/2
## i Fold3: preprocessor 1/1, model 1/2 (predictions)
## i Fold3: preprocessor 1/1, model 2/2
## ✓ Fold3: preprocessor 1/1, model 2/2
## i Fold3: preprocessor 1/1, model 2/2 (predictions)
## i Fold4: preprocessor 1/1
## ✓ Fold4: preprocessor 1/1
## i Fold4: preprocessor 1/1, model 1/2
## ✓ Fold4: preprocessor 1/1, model 1/2
## i Fold4: preprocessor 1/1, model 1/2 (predictions)
## i Fold4: preprocessor 1/1, model 2/2
## ✓ Fold4: preprocessor 1/1, model 2/2
## i Fold4: preprocessor 1/1, model 2/2 (predictions)
## i Fold5: preprocessor 1/1
## ✓ Fold5: preprocessor 1/1
## i Fold5: preprocessor 1/1, model 1/2
## ✓ Fold5: preprocessor 1/1, model 1/2
## i Fold5: preprocessor 1/1, model 1/2 (predictions)
## i Fold5: preprocessor 1/1, model 2/2
## ✓ Fold5: preprocessor 1/1, model 2/2
## i Fold5: preprocessor 1/1, model 2/2 (predictions)
## ✓ Finished tuning Model Specification.
## ℹ Model Parameters:
## # A tibble: 1 × 8
##       penalty mixture .metric .estimator  mean     n std_err .config            
##         <dbl>   <dbl> <chr>   <chr>      <dbl> <int>   <dbl> <chr>              
## 1 0.000000268   0.196 rmse    standard    102.     5    12.6 Preprocessor1_Mode…
## ℹ Prediction Error Comparison:
## # A tibble: 5 × 3
##   .model_id  rmse .model_desc                                         
##   <chr>     <dbl> <chr>                                               
## 1 1         178.  ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ XGBOOST ERRORS
## 2 2         159.  PROPHET W/ XGBOOST ERRORS                           
## 3 3         189.  XGBOOST                                             
## 4 4         207.  RANDOMFOREST                                        
## 5 ensemble   74.3 ENSEMBLE (MODEL SPEC)                               
## 
## ── Final Model ──────────────────────────────────────────────────
## ℹ Model Workflow:
## ══ Workflow [trained] ══════════════════════════════════════════════════════════
## Preprocessor: Recipe
## Model: linear_reg()
## 
## ── Preprocessor ────────────────────────────────────────────────────────────────
## 0 Recipe Steps
## 
## ── Model ───────────────────────────────────────────────────────────────────────
## 
## Call:  glmnet::glmnet(x = maybe_matrix(x), y = y, family = "gaussian",      alpha = ~0.196406674844911) 
## 
##     Df  %Dev  Lambda
## 1    0  0.00 1092.00
## 2    3  6.19  994.80
## 3    4 13.36  906.40
## 4    4 20.23  825.90
## 5    4 26.64  752.50
## 6    4 32.58  685.70
## 7    4 38.06  624.80
## 8    4 43.07  569.30
## 9    4 47.65  518.70
## 10   4 51.80  472.60
## 11   4 55.53  430.60
## 12   4 58.88  392.40
## 13   4 61.87  357.50
## 14   4 64.53  325.80
## 15   4 66.88  296.80
## 16   4 68.95  270.40
## 17   4 70.77  246.40
## 18   4 72.36  224.50
## 19   4 73.76  204.60
## 20   4 74.97  186.40
## 21   4 76.03  169.80
## 22   4 76.95  154.80
## 23   4 77.74  141.00
## 24   4 78.44  128.50
## 25   4 79.04  117.10
## 26   4 79.57  106.70
## 27   4 80.04   97.19
## 28   4 80.44   88.56
## 29   4 80.81   80.69
## 30   4 81.13   73.52
## 31   4 81.42   66.99
## 32   4 81.69   61.04
## 33   4 81.94   55.62
## 34   4 82.17   50.68
## 35   4 82.39   46.17
## 36   4 82.60   42.07
## 37   4 82.80   38.33
## 38   3 82.89   34.93
## 39   3 82.97   31.83
## 40   3 83.04   29.00
## 41   3 83.10   26.42
## 42   3 83.15   24.08
## 43   3 83.19   21.94
## 44   3 83.24   19.99
## 45   3 83.27   18.21
## 46   3 83.31   16.59
## 
## ...
## and 54 more lines.
## 
## 7.059 sec elapsed
ensemble_fit_xg<- resamples_fitted %>%
  ensemble_model_spec(
    model_spec = boost_tree(
      mtry=tune(),
      trees=tune(),
      learn_rate = tune()
    ) %>% set_engine("xgboost"),
    control = control_grid(verbose = TRUE)
  )
## ── Tuning Model Specification ───────────────────────────────────
## ℹ Performing 5-Fold Cross Validation.
## i Creating pre-processing data to finalize unknown parameter: mtry
## i Fold1: preprocessor 1/1
## ✓ Fold1: preprocessor 1/1
## i Fold1: preprocessor 1/1, model 1/6
## ✓ Fold1: preprocessor 1/1, model 1/6
## i Fold1: preprocessor 1/1, model 1/6 (predictions)
## i Fold1: preprocessor 1/1, model 2/6
## ✓ Fold1: preprocessor 1/1, model 2/6
## i Fold1: preprocessor 1/1, model 2/6 (predictions)
## i Fold1: preprocessor 1/1, model 3/6
## ✓ Fold1: preprocessor 1/1, model 3/6
## i Fold1: preprocessor 1/1, model 3/6 (predictions)
## i Fold1: preprocessor 1/1, model 4/6
## ✓ Fold1: preprocessor 1/1, model 4/6
## i Fold1: preprocessor 1/1, model 4/6 (predictions)
## i Fold1: preprocessor 1/1, model 5/6
## ✓ Fold1: preprocessor 1/1, model 5/6
## i Fold1: preprocessor 1/1, model 5/6 (predictions)
## i Fold1: preprocessor 1/1, model 6/6
## ✓ Fold1: preprocessor 1/1, model 6/6
## i Fold1: preprocessor 1/1, model 6/6 (predictions)
## i Fold2: preprocessor 1/1
## ✓ Fold2: preprocessor 1/1
## i Fold2: preprocessor 1/1, model 1/6
## ✓ Fold2: preprocessor 1/1, model 1/6
## i Fold2: preprocessor 1/1, model 1/6 (predictions)
## i Fold2: preprocessor 1/1, model 2/6
## ✓ Fold2: preprocessor 1/1, model 2/6
## i Fold2: preprocessor 1/1, model 2/6 (predictions)
## i Fold2: preprocessor 1/1, model 3/6
## ✓ Fold2: preprocessor 1/1, model 3/6
## i Fold2: preprocessor 1/1, model 3/6 (predictions)
## i Fold2: preprocessor 1/1, model 4/6
## ✓ Fold2: preprocessor 1/1, model 4/6
## i Fold2: preprocessor 1/1, model 4/6 (predictions)
## i Fold2: preprocessor 1/1, model 5/6
## ✓ Fold2: preprocessor 1/1, model 5/6
## i Fold2: preprocessor 1/1, model 5/6 (predictions)
## i Fold2: preprocessor 1/1, model 6/6
## ✓ Fold2: preprocessor 1/1, model 6/6
## i Fold2: preprocessor 1/1, model 6/6 (predictions)
## i Fold3: preprocessor 1/1
## ✓ Fold3: preprocessor 1/1
## i Fold3: preprocessor 1/1, model 1/6
## ✓ Fold3: preprocessor 1/1, model 1/6
## i Fold3: preprocessor 1/1, model 1/6 (predictions)
## i Fold3: preprocessor 1/1, model 2/6
## ✓ Fold3: preprocessor 1/1, model 2/6
## i Fold3: preprocessor 1/1, model 2/6 (predictions)
## i Fold3: preprocessor 1/1, model 3/6
## ✓ Fold3: preprocessor 1/1, model 3/6
## i Fold3: preprocessor 1/1, model 3/6 (predictions)
## i Fold3: preprocessor 1/1, model 4/6
## ✓ Fold3: preprocessor 1/1, model 4/6
## i Fold3: preprocessor 1/1, model 4/6 (predictions)
## i Fold3: preprocessor 1/1, model 5/6
## ✓ Fold3: preprocessor 1/1, model 5/6
## i Fold3: preprocessor 1/1, model 5/6 (predictions)
## i Fold3: preprocessor 1/1, model 6/6
## ✓ Fold3: preprocessor 1/1, model 6/6
## i Fold3: preprocessor 1/1, model 6/6 (predictions)
## i Fold4: preprocessor 1/1
## ✓ Fold4: preprocessor 1/1
## i Fold4: preprocessor 1/1, model 1/6
## ✓ Fold4: preprocessor 1/1, model 1/6
## i Fold4: preprocessor 1/1, model 1/6 (predictions)
## i Fold4: preprocessor 1/1, model 2/6
## ✓ Fold4: preprocessor 1/1, model 2/6
## i Fold4: preprocessor 1/1, model 2/6 (predictions)
## i Fold4: preprocessor 1/1, model 3/6
## ✓ Fold4: preprocessor 1/1, model 3/6
## i Fold4: preprocessor 1/1, model 3/6 (predictions)
## i Fold4: preprocessor 1/1, model 4/6
## ✓ Fold4: preprocessor 1/1, model 4/6
## i Fold4: preprocessor 1/1, model 4/6 (predictions)
## i Fold4: preprocessor 1/1, model 5/6
## ✓ Fold4: preprocessor 1/1, model 5/6
## i Fold4: preprocessor 1/1, model 5/6 (predictions)
## i Fold4: preprocessor 1/1, model 6/6
## ✓ Fold4: preprocessor 1/1, model 6/6
## i Fold4: preprocessor 1/1, model 6/6 (predictions)
## i Fold5: preprocessor 1/1
## ✓ Fold5: preprocessor 1/1
## i Fold5: preprocessor 1/1, model 1/6
## ✓ Fold5: preprocessor 1/1, model 1/6
## i Fold5: preprocessor 1/1, model 1/6 (predictions)
## i Fold5: preprocessor 1/1, model 2/6
## ✓ Fold5: preprocessor 1/1, model 2/6
## i Fold5: preprocessor 1/1, model 2/6 (predictions)
## i Fold5: preprocessor 1/1, model 3/6
## ✓ Fold5: preprocessor 1/1, model 3/6
## i Fold5: preprocessor 1/1, model 3/6 (predictions)
## i Fold5: preprocessor 1/1, model 4/6
## ✓ Fold5: preprocessor 1/1, model 4/6
## i Fold5: preprocessor 1/1, model 4/6 (predictions)
## i Fold5: preprocessor 1/1, model 5/6
## ✓ Fold5: preprocessor 1/1, model 5/6
## i Fold5: preprocessor 1/1, model 5/6 (predictions)
## i Fold5: preprocessor 1/1, model 6/6
## ✓ Fold5: preprocessor 1/1, model 6/6
## i Fold5: preprocessor 1/1, model 6/6 (predictions)
## ✓ Finished tuning Model Specification.
## ℹ Model Parameters:
## # A tibble: 1 × 9
##    mtry trees learn_rate .metric .estimator  mean     n std_err .config         
##   <int> <int>      <dbl> <chr>   <chr>      <dbl> <int>   <dbl> <chr>           
## 1     2  1701    0.00577 rmse    standard    74.4     5    4.22 Preprocessor1_M…
## ℹ Prediction Error Comparison:
## # A tibble: 5 × 3
##   .model_id   rmse .model_desc                                         
##   <chr>      <dbl> <chr>                                               
## 1 1         178.   ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ XGBOOST ERRORS
## 2 2         159.   PROPHET W/ XGBOOST ERRORS                           
## 3 3         189.   XGBOOST                                             
## 4 4         207.   RANDOMFOREST                                        
## 5 ensemble    2.54 ENSEMBLE (MODEL SPEC)                               
## 
## ── Final Model ──────────────────────────────────────────────────
## ℹ Model Workflow:
## ══ Workflow [trained] ══════════════════════════════════════════════════════════
## Preprocessor: Recipe
## Model: boost_tree()
## 
## ── Preprocessor ────────────────────────────────────────────────────────────────
## 0 Recipe Steps
## 
## ── Model ───────────────────────────────────────────────────────────────────────
## ##### xgb.Booster
## raw: 2.2 Mb 
## call:
##   xgboost::xgb.train(params = list(eta = 0.00577437481280841, max_depth = 6, 
##     gamma = 0, colsample_bytree = 1, colsample_bynode = 0.5, 
##     min_child_weight = 1, subsample = 1, objective = "reg:squarederror"), 
##     data = x$data, nrounds = 1701L, watchlist = x$watchlist, 
##     verbose = 0, nthread = 1)
## params (as set within xgb.train):
##   eta = "0.00577437481280841", max_depth = "6", gamma = "0", colsample_bytree = "1", colsample_bynode = "0.5", min_child_weight = "1", subsample = "1", objective = "reg:squarederror", nthread = "1", validate_parameters = "TRUE"
## xgb.attributes:
##   niter
## callbacks:
##   cb.evaluation.log()
## # of features: 4 
## niter: 1701
## nfeatures : 4 
## evaluation_log:
##     iter training_rmse
##        1   1426.744385
##        2   1419.037964
## ---                   
##     1700      2.543092
##     1701      2.535558
## 
## 4546.78 sec elapsed
ensemble_tbl<- modeltime_table(
  ensemble_fit_mean,
  ensemble_fit_lm,
  ensemble_fit_xg
)
Ensemble test Accuracy
ensemble_tbl%>%
  combine_modeltime_tables(submodels_tbl)%>%
  modeltime_accuracy(testing(splits))%>%
  arrange(rmse,mae,mape,mase)%>%
  knitr::kable()
## Warning: A correlation computation is required, but `estimate` is constant
## and has 0 standard deviation, resulting in a divide by 0 error. `NA` will be
## returned.
.model_id .model_desc .type mae mape mase smape rmse rsq
4 ARIMA(0,1,0)(1,0,0)[12] WITH DRIFT W/ XGBOOST ERRORS Test 66.62842 3.755342 0.7740205 3.805410 75.78838 0.7921893
3 ENSEMBLE (XGBOOST STACK): 4 MODELS Test 94.37754 5.205527 1.0963813 5.408021 115.76411 0.4953768
5 PROPHET W/ XGBOOST ERRORS Test 118.03429 6.525348 1.3712011 6.833269 141.17847 0.0918866
2 ENSEMBLE (GLMNET STACK): 4 MODELS Test 151.88706 8.464045 1.7644677 8.883949 172.76030 0.8428208
1 ENSEMBLE (MEAN): 4 MODELS Test 154.92191 8.618272 1.7997235 9.113223 175.73619 0.6783658
6 XGBOOST Test 168.01570 9.335913 1.9518337 9.930132 192.05816 NA
7 RANDOMFOREST Test 315.61979 17.838659 3.6665463 19.670737 325.19434 0.6062203
Ensemble Test Forecast
ensemble_tbl%>%
  modeltime_calibrate(testing(splits))%>%
  modeltime_forecast(
    new_data =  testing(splits),
    actual_data = data_prepared_tbl,
    keep_data = T,
    conf_by_id = T,
    conf_interval = 0.95
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
## Warning: The 'id' column in calibration data was not detected. Global Confidence
## Interval is being returned.
ensemble_tbl%>%
  combine_modeltime_tables(submodels_tbl)%>%
  modeltime_calibrate(testing(splits))%>%
  modeltime_forecast(
    new_data =  testing(splits),
    actual_data = data_prepared_tbl,
    keep_data = T
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
ensemble_tbl_all_model<-ensemble_tbl%>%
  combine_modeltime_tables(submodels_tbl)
Refit Ensemble
ensemble_refit_tbl <- ensemble_tbl%>%
  modeltime_refit(data_prepared_tbl)
## frequency = 12 observations per 1 year
## Warning in mdl_time_refit.mdl_time_ensemble_model_spec(...): 'resamples' not
## provided during refitting. Submodels will be refit, but the meta-learner will
## *not* be refit. You can provide 'resamples' via `modeltime_refit(object, data,
## resamples, control)`. Proceeding by refitting the submodels only.
## frequency = 12 observations per 1 year
## Warning in mdl_time_refit.mdl_time_ensemble_model_spec(...): 'resamples' not
## provided during refitting. Submodels will be refit, but the meta-learner will
## *not* be refit. You can provide 'resamples' via `modeltime_refit(object, data,
## resamples, control)`. Proceeding by refitting the submodels only.
## frequency = 12 observations per 1 year
Visualize Ensemble Forecast
ensemble_refit_tbl%>%
  modeltime_forecast(
    new_data =  future_tbl,
    actual_data = data_prepared_tbl,
    keep_data = T
  )%>%
  plot_modeltime_forecast(
    .facet_ncol=2
  )
## Warning: Expecting the following names to be in the data frame: .conf_hi, .conf_lo. 
## Proceeding with '.conf_interval_show = FALSE' to visualize the forecast without confidence intervals.
## Alternatively, try using `modeltime_calibrate()` before forecasting to add confidence intervals.

3.2.4. Resumen de Modelo

3.2.5. Pronóstico

3.3. Prueba de Tensión

4. Conclusiones

5.Anexos

# otlier_crc
# plot(otlier_crc)
# plot(otlier_usd)
# otlier_usd
#descompo